T53161 elasticsearch can index nodes now. cli command. NOTE config changes!!
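The commit message flags config changes: the new code reads ELASTIC_SEARCH_HOSTS and ELASTIC_INDICES['NODE'] from the application config (see the hunks below). A minimal sketch of the corresponding settings; the key names come from this diff, the values are only assumed examples:

# Hypothetical config additions; key names from the diff, values are examples only.
ELASTIC_SEARCH_HOSTS = ['elasticsearch:9200']
ELASTIC_INDICES = {
    'NODE': 'nodes',
}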
@@ -2,9 +2,11 @@ def setup_app(app):
     from . import encoding, blender_id, projects, local_auth, file_storage
     from . import users, nodes, latest, blender_cloud, service, activities
     from . import organizations
+    from . import search
 
     encoding.setup_app(app, url_prefix='/encoding')
     blender_id.setup_app(app, url_prefix='/blender_id')
+    search.setup_app(app, url_prefix='/newsearch')
     projects.setup_app(app, api_prefix='/p')
     local_auth.setup_app(app, url_prefix='/auth')
     file_storage.setup_app(app, url_prefix='/storage')
@@ -0,0 +1,13 @@
+import logging
+
+#import bson
+#from flask import current_app
+
+from .routes import blueprint_search
+
+log = logging.getLogger(__name__)
+
+
+def setup_app(app, url_prefix: str =None):
+    app.register_api_blueprint(
+        blueprint_search, url_prefix=url_prefix)
@@ -26,15 +26,16 @@ autocomplete = es.analyzer(
 
 
 class User(es.DocType):
-    """
-    Elastic document describing user
-    """
+    """Elastic document describing user."""
+
     name = es.String(
         fielddata=True,
         analyzer=autocomplete,
     )
 
     class Meta:
         index = 'users'
 
 
 class Node(es.DocType):
     """
@@ -43,12 +44,39 @@ class Node(es.DocType):
 
     node_type = es.Keyword()
 
-    x_code = es.String(
-        multi=True,
+    objectID = es.Keyword()
+
+    name = es.String(
         fielddata=True,
-        analyzer=autocomplete,
+        analyzer=autocomplete
     )
 
+    user_id = es.Keyword()
+    user_name = es.String(
+        fielddata=True,
+        analyzer=autocomplete
+    )
+
+    description = es.String()
+
+    is_free = es.Boolean()
+
+    project_id = es.Keyword()
+    project_name = es.String()
+
+    media = es.Keyword()
+
+    picture_url = es.Keyword()
+
+    tags = es.Keyword(multi=True)
+    license_notes = es.String()
+
+    created_at = es.Date()
+    updated_at = es.Date()
+
     class Meta:
         index = 'nodes'
 
 
 def create_doc_from_user_data(user_to_index):
     doc_id = user_to_index['objectID']
@@ -59,8 +87,25 @@ def create_doc_from_user_data(user_to_index):
 def create_doc_from_node_data(node_to_index):
 
     # node stuff
-    doc_id = node_to_index['objectID']
+    doc_id = str(node_to_index['objectID'])
     doc = Node(_id=doc_id)
 
+    doc.node_type = node_to_index['node_type']
+    doc.name = node_to_index['name']
+    doc.user_id = str(node_to_index['user']['_id'])
+    doc.user_name = node_to_index['user']['full_name']
+    doc.project_id = str(node_to_index['project']['_id'])
+    doc.project_name = node_to_index['project']['name']
+
+    if node_to_index['node_type'] == 'asset':
+        doc.media = node_to_index['media']
+
+    doc.picture_url = node_to_index.get('picture')
+
+    doc.tags = node_to_index.get('tags')
+    doc.license_notes = node_to_index.get('license_notes')
+
+    doc.created_at = node_to_index['created']
+    doc.updated_at = node_to_index['updated']
+
     return doc
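For illustration, a sketch of pushing one node through create_doc_from_node_data() and saving it; the payload values are invented, and an Elasticsearch connection is assumed to be configured as in the indexing module below.

import datetime

# Hypothetical node payload; the keys mirror what create_doc_from_node_data() reads,
# the values are made up.
node = {
    'objectID': '58c9f63bf4a23c05a6dbecee',
    'node_type': 'asset',
    'name': 'Spring rig demo',
    'user': {'_id': '55f329f6954cc1392a9bfd4e', 'full_name': 'Jane Doe'},
    'project': {'_id': '570e8d1ba7549bbb15bfdf2b', 'name': 'Spring'},
    'media': 'video',
    'picture': None,
    'tags': ['rigging'],
    'license_notes': None,
    'created': datetime.datetime.utcnow(),
    'updated': datetime.datetime.utcnow(),
}

doc = create_doc_from_node_data(node)
doc.save()  # writes to the 'nodes' index via the default connection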
@@ -1,10 +1,25 @@
 import logging
+from pillar import current_app
+from elasticsearch_dsl.connections import connections
+
+from . import documents
+
+
+elk_hosts = current_app.config['ELASTIC_SEARCH_HOSTS']
+
+connections.create_connection(
+    hosts=elk_hosts,
+    sniff_on_start=True,
+    timeout=20)
 
 log = logging.getLogger(__name__)
 
 
 def push_updated_user(user_to_index: dict):
-    """Push an update to the Algolia index when a user item is updated"""
+    """
+    Push an update to the Elastic index when
+    a user item is updated.
+    """
 
     log.warning(
         'WIP USER ELK INDEXING %s %s',
@@ -15,11 +30,18 @@ def push_updated_user(user_to_index: dict):
 def index_node_save(node_to_index: dict):
 
     log.warning(
-        'WIP USER NODE INDEXING %s',
+        'ELK NODE INDEXING %s',
         node_to_index.get('objectID'))
 
+    log.warning(node_to_index)
+
+    doc = documents.create_doc_from_node_data(node_to_index)
+
+    log.warning('CREATED ELK DOC')
+    doc.save()
+
 
 def index_node_delete(delete_id: str):
 
-    log.warning(
-        'WIP NODE DELETE INDEXING %s', delete_id)
+    log.warning('NODE DELETE INDEXING %s', delete_id)
+    documents.Node(id=delete_id).delete()
pillar/api/search/index.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+import logging
+# import time
+
+# from elasticsearch import helpers
+# import elasticsearch
+
+# from elasticsearch.client import IndicesClient
+
+from elasticsearch.exceptions import NotFoundError
+from elasticsearch_dsl.connections import connections
+import elasticsearch_dsl as es
+
+from pillar import current_app
+
+from . import documents
+
+log = logging.getLogger(__name__)
+
+
+class ResetIndexTask(object):
+    """
+    Clear and build index / mapping
+    """
+    index = ''
+    doc_types = []
+    name = 'remove index'
+
+    def __init__(self):
+
+        if not self.index:
+            raise ValueError("No index specified")
+
+        if not self.doc_types:
+            raise ValueError("No doc_types specified")
+
+        connections.create_connection(
+            hosts=current_app.config['ELASTIC_SEARCH_HOSTS'],
+            # sniff_on_start=True,
+            retry_on_timeout=True,
+        )
+
+    def execute(self):
+
+        idx = es.Index(self.index)
+
+        try:
+            idx.delete(ignore=404)
+            log.info("Deleted index %s", self.index)
+        except AttributeError:
+            log.warning("Could not delete index '%s', ignoring", self.index)
+        except NotFoundError:
+            log.warning("Could not delete index '%s', ignoring", self.index)
+
+        # create doc types
+        for dt in self.doc_types:
+            idx.doc_type(dt)
+
+        # create index
+        idx.create()
+
+
+class ResetNodeIndex(ResetIndexTask):
+    index = current_app.config['ELASTIC_INDICES']['NODE']
+    doc_types = [documents.Node]
+
+
+def reset_node_index():
+    resettask = ResetNodeIndex()
+    resettask.execute()
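The commit message mentions a CLI command, but the command itself is not in the hunks shown here. A hypothetical way to expose reset_node_index() through Flask's CLI; the group and command names are assumptions, not taken from this commit.

# Hypothetical CLI wiring; not part of this diff.
from flask.cli import AppGroup

from pillar.api.search.index import reset_node_index

elastic_cli = AppGroup('elastic', help='Elasticsearch index maintenance.')


@elastic_cli.command('reset_node_index')
def reset_node_index_command():
    """Drop and recreate the Elasticsearch node index."""
    reset_node_index()

# The group would be registered wherever the app wires up its CLI, e.g.:
# app.cli.add_command(elastic_cli)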
pillar/api/search/queries.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+import logging
+import json
+from elasticsearch import Elasticsearch
+from elasticsearch_dsl import Search, Q
+from elasticsearch_dsl.connections import connections
+
+from pillar import current_app
+
+#elk_hosts = current_app.config['ELASTIC_SEARCH_HOSTS']
+#
+#connections.create_connection(
+#    hosts=elk_hosts,
+#    sniff_on_start=True,
+#    timeout=20)
+#
+client = Elasticsearch()
+
+log = logging.getLogger(__name__)
+
+
+def do_search(query: str) -> dict:
+    """
+    Given user input search for node/stuff
+    """
+    should = [
+        Q('match', name=query),
+        Q('match', user_name=query),
+        Q('match', project_name=query),
+        Q('match', description=query),
+        Q('term', media=query),
+        Q('term', tags=query),
+    ]
+    bool_query = Q('bool', should=should)
+    search = Search(using=client)
+    search.query = bool_query
+
+    if current_app.config['DEBUG']:
+        log.debug(json.dumps(search.to_dict(), indent=4))
+
+    response = search.execute()
+
+    return response.to_dict()
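For reference, the bool/should query that do_search() builds serializes to a plain dict; for a query string like 'spring', search.to_dict() comes out roughly as follows (a sketch of how elasticsearch_dsl renders these Q objects):

# Approximate value of search.to_dict() inside do_search('spring').
expected_body = {
    'query': {
        'bool': {
            'should': [
                {'match': {'name': 'spring'}},
                {'match': {'user_name': 'spring'}},
                {'match': {'project_name': 'spring'}},
                {'match': {'description': 'spring'}},
                {'term': {'media': 'spring'}},
                {'term': {'tags': 'spring'}},
            ],
        },
    },
}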
pillar/api/search/routes.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+import json
+import logging
+
+from bson import ObjectId
+from flask import Blueprint, request, current_app, make_response, url_for
+from flask import Response
+from werkzeug import exceptions as wz_exceptions
+
+from pillar.api.utils import authorization, jsonify, str2id
+from pillar.api.utils import mongo
+from pillar.api.utils.authorization import require_login, check_permissions
+from pillar.auth import current_user
+
+
+log = logging.getLogger(__name__)
+
+blueprint_search = Blueprint('elksearch', __name__)
+
+from . import queries
+
+#@authorization.require_login(require_cap='subscriber')
+@blueprint_search.route('/', methods=['GET'])
+def search_nodes():
+
+    searchword = request.args.get('q', '')
+
+    if not searchword:
+        return 'You are forgetting a "?q=whatareyoulookingfor"'
+
+    data = queries.do_search(searchword)
+
+    resp = Response(json.dumps(data), mimetype='application/json')
+    return resp
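A minimal sketch of exercising the new endpoint with Flask's test client, assuming the blueprint ends up reachable under the '/newsearch' prefix passed to setup_app() above (the exact mount point depends on register_api_blueprint):

import json

# 'app' is assumed to be a fully configured Pillar application with
# Elasticsearch reachable.
with app.test_client() as client:
    resp = client.get('/newsearch/?q=spring')
    data = json.loads(resp.data)
    print(resp.status_code, data.get('hits', {}).get('total'))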
@@ -16,7 +16,7 @@ def index_user_save(to_index_user: dict):
         return
 
     # Create or update Algolia index for the user
-    index_users.save_object()
+    index_users.save_object(to_index_user)
 
 
 @skip_when_testing