1 Commit

Author SHA1 Message Date
4927a26497 Videojs: Show endscreen with links 2018-09-14 02:59:21 +02:00
239 changed files with 10650 additions and 24837 deletions

View File

@@ -1,3 +0,0 @@
{
"presets": ["@babel/preset-env"]
}

View File

@@ -65,12 +65,6 @@ You can run the Celery Worker using `manage.py celery worker`.
Find other Celery operations with the `manage.py celery` command.
## Elasticsearch
Pillar uses [Elasticsearch](https://www.elastic.co/products/elasticsearch) to power the search engine.
You will need to run the `manage.py elastic reset_index` command to initialize the indexing.
If you need to reindex your documents in Elastic, run the `manage.py elastic reindex` command.
## Translations
If the language you want to support doesn't exist, you need to run: `translations init es_AR`.

View File

@@ -1,27 +1,20 @@
let argv = require('minimist')(process.argv.slice(2)); var argv = require('minimist')(process.argv.slice(2));
let autoprefixer = require('gulp-autoprefixer'); var autoprefixer = require('gulp-autoprefixer');
let cache = require('gulp-cached'); var cache = require('gulp-cached');
let chmod = require('gulp-chmod'); var chmod = require('gulp-chmod');
let concat = require('gulp-concat'); var concat = require('gulp-concat');
let git = require('gulp-git'); var git = require('gulp-git');
let gulpif = require('gulp-if'); var gulpif = require('gulp-if');
let gulp = require('gulp'); var gulp = require('gulp');
let livereload = require('gulp-livereload'); var livereload = require('gulp-livereload');
let plumber = require('gulp-plumber'); var plumber = require('gulp-plumber');
let pug = require('gulp-pug'); var pug = require('gulp-pug');
let rename = require('gulp-rename'); var rename = require('gulp-rename');
let sass = require('gulp-sass'); var sass = require('gulp-sass');
let sourcemaps = require('gulp-sourcemaps'); var sourcemaps = require('gulp-sourcemaps');
let uglify = require('gulp-uglify-es').default; var uglify = require('gulp-uglify-es').default;
let browserify = require('browserify');
let babelify = require('babelify');
let sourceStream = require('vinyl-source-stream');
let glob = require('glob');
let es = require('event-stream');
let path = require('path');
let buffer = require('vinyl-buffer');
let enabled = { var enabled = {
uglify: argv.production, uglify: argv.production,
maps: !argv.production, maps: !argv.production,
failCheck: !argv.production, failCheck: !argv.production,
@@ -31,21 +24,20 @@ let enabled = {
chmod: argv.production, chmod: argv.production,
}; };
let destination = { var destination = {
css: 'pillar/web/static/assets/css', css: 'pillar/web/static/assets/css',
pug: 'pillar/web/templates', pug: 'pillar/web/templates',
js: 'pillar/web/static/assets/js', js: 'pillar/web/static/assets/js',
} }
let source = { var source = {
bootstrap: 'node_modules/bootstrap/', bootstrap: 'node_modules/bootstrap/',
jquery: 'node_modules/jquery/', jquery: 'node_modules/jquery/',
popper: 'node_modules/popper.js/', popper: 'node_modules/popper.js/'
vue: 'node_modules/vue/',
} }
/* Stylesheets */ /* CSS */
gulp.task('styles', function(done) { gulp.task('styles', function() {
gulp.src('src/styles/**/*.sass') gulp.src('src/styles/**/*.sass')
.pipe(gulpif(enabled.failCheck, plumber())) .pipe(gulpif(enabled.failCheck, plumber()))
.pipe(gulpif(enabled.maps, sourcemaps.init())) .pipe(gulpif(enabled.maps, sourcemaps.init()))
@@ -56,12 +48,11 @@ gulp.task('styles', function(done) {
.pipe(gulpif(enabled.maps, sourcemaps.write("."))) .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
.pipe(gulp.dest(destination.css)) .pipe(gulp.dest(destination.css))
.pipe(gulpif(argv.livereload, livereload())); .pipe(gulpif(argv.livereload, livereload()));
done();
}); });
/* Templates */ /* Templates - Pug */
gulp.task('templates', function(done) { gulp.task('templates', function() {
gulp.src('src/templates/**/*.pug') gulp.src('src/templates/**/*.pug')
.pipe(gulpif(enabled.failCheck, plumber())) .pipe(gulpif(enabled.failCheck, plumber()))
.pipe(gulpif(enabled.cachify, cache('templating'))) .pipe(gulpif(enabled.cachify, cache('templating')))
@@ -70,12 +61,11 @@ gulp.task('templates', function(done) {
})) }))
.pipe(gulp.dest(destination.pug)) .pipe(gulp.dest(destination.pug))
.pipe(gulpif(argv.livereload, livereload())); .pipe(gulpif(argv.livereload, livereload()));
done();
}); });
/* Individual Uglified Scripts */ /* Individual Uglified Scripts */
gulp.task('scripts', function(done) { gulp.task('scripts', function() {
gulp.src('src/scripts/*.js') gulp.src('src/scripts/*.js')
.pipe(gulpif(enabled.failCheck, plumber())) .pipe(gulpif(enabled.failCheck, plumber()))
.pipe(gulpif(enabled.cachify, cache('scripting'))) .pipe(gulpif(enabled.cachify, cache('scripting')))
@@ -83,96 +73,37 @@ gulp.task('scripts', function(done) {
.pipe(gulpif(enabled.uglify, uglify())) .pipe(gulpif(enabled.uglify, uglify()))
.pipe(rename({suffix: '.min'})) .pipe(rename({suffix: '.min'}))
.pipe(gulpif(enabled.maps, sourcemaps.write("."))) .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
.pipe(gulpif(enabled.chmod, chmod(0o644))) .pipe(gulpif(enabled.chmod, chmod(644)))
.pipe(gulp.dest(destination.js)) .pipe(gulp.dest(destination.js))
.pipe(gulpif(argv.livereload, livereload())); .pipe(gulpif(argv.livereload, livereload()));
done();
});
function browserify_base(entry) {
let pathSplited = path.dirname(entry).split(path.sep);
let moduleName = pathSplited[pathSplited.length - 1];
return browserify({
entries: [entry],
standalone: 'pillar.' + moduleName,
})
.transform(babelify, { "presets": ["@babel/preset-env"] })
.bundle()
.pipe(gulpif(enabled.failCheck, plumber()))
.pipe(sourceStream(path.basename(entry)))
.pipe(buffer())
.pipe(rename({
basename: moduleName,
extname: '.min.js'
}));
}
/**
* Transcompile and package common modules to be included in tutti.js.
*
* Example:
* src/scripts/js/es6/common/api/init.js
* src/scripts/js/es6/common/events/init.js
* Everything exported in api/init.js will end up in module pillar.api.*, and everything exported in events/init.js
* will end up in pillar.events.*
*/
function browserify_common() {
return glob.sync('src/scripts/js/es6/common/**/init.js').map(browserify_base);
}
/**
* Transcompile and package individual modules.
*
* Example:
* src/scripts/js/es6/individual/coolstuff/init.js
* Will create a coolstuff.js and everything exported in init.js will end up in namespace pillar.coolstuff.*
*/
gulp.task('scripts_browserify', function(done) {
glob('src/scripts/js/es6/individual/**/init.js', function(err, files) {
if(err) done(err);
var tasks = files.map(function(entry) {
return browserify_base(entry)
.pipe(gulpif(enabled.maps, sourcemaps.init()))
.pipe(gulpif(enabled.uglify, uglify()))
.pipe(gulpif(enabled.maps, sourcemaps.write(".")))
.pipe(gulp.dest(destination.js));
});
es.merge(tasks).on('end', done);
})
}); });
/* Collection of scripts in src/scripts/tutti/ and src/scripts/js/es6/common/ to merge into tutti.min.js /* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js
* Since it's always loaded, it's only for functions that we want site-wide. * Since it's always loaded, it's only for functions that we want site-wide.
* It also includes jQuery and Bootstrap (and its dependency popper), since * It also includes jQuery and Bootstrap (and its dependency popper), since
* the site doesn't work without it anyway.*/ * the site doesn't work without it anyway.*/
gulp.task('scripts_concat_tutti', function(done) { gulp.task('scripts_concat_tutti', function() {
let toUglify = [ toUglify = [
source.jquery + 'dist/jquery.min.js', source.jquery + 'dist/jquery.min.js',
source.vue + (enabled.uglify ? 'dist/vue.min.js' : 'dist/vue.js'),
source.popper + 'dist/umd/popper.min.js', source.popper + 'dist/umd/popper.min.js',
source.bootstrap + 'js/dist/index.js', source.bootstrap + 'js/dist/index.js',
source.bootstrap + 'js/dist/util.js', source.bootstrap + 'js/dist/util.js',
source.bootstrap + 'js/dist/alert.js',
source.bootstrap + 'js/dist/collapse.js',
source.bootstrap + 'js/dist/dropdown.js',
source.bootstrap + 'js/dist/tooltip.js', source.bootstrap + 'js/dist/tooltip.js',
source.bootstrap + 'js/dist/dropdown.js',
'src/scripts/tutti/**/*.js' 'src/scripts/tutti/**/*.js'
]; ];
es.merge(gulp.src(toUglify), ...browserify_common()) gulp.src(toUglify)
.pipe(gulpif(enabled.failCheck, plumber())) .pipe(gulpif(enabled.failCheck, plumber()))
.pipe(gulpif(enabled.maps, sourcemaps.init())) .pipe(gulpif(enabled.maps, sourcemaps.init()))
.pipe(concat("tutti.min.js")) .pipe(concat("tutti.min.js"))
.pipe(gulpif(enabled.uglify, uglify())) .pipe(gulpif(enabled.uglify, uglify()))
.pipe(gulpif(enabled.maps, sourcemaps.write("."))) .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
.pipe(gulpif(enabled.chmod, chmod(0o644))) .pipe(gulpif(enabled.chmod, chmod(644)))
.pipe(gulp.dest(destination.js)) .pipe(gulp.dest(destination.js))
.pipe(gulpif(argv.livereload, livereload())); .pipe(gulpif(argv.livereload, livereload()));
done();
}); });
@@ -190,24 +121,22 @@ gulp.task('scripts_move_vendor', function(done) {
// While developing, run 'gulp watch' // While developing, run 'gulp watch'
gulp.task('watch',function(done) { gulp.task('watch',function() {
// Only listen for live reloads if ran with --livereload // Only listen for live reloads if ran with --livereload
if (argv.livereload){ if (argv.livereload){
livereload.listen(); livereload.listen();
} }
gulp.watch('src/styles/**/*.sass',gulp.series('styles')); gulp.watch('src/styles/**/*.sass',['styles']);
gulp.watch('src/templates/**/*.pug',gulp.series('templates')); gulp.watch('src/templates/**/*.pug',['templates']);
gulp.watch('src/scripts/*.js',gulp.series('scripts')); gulp.watch('src/scripts/*.js',['scripts']);
gulp.watch('src/scripts/tutti/**/*.js',gulp.series('scripts_concat_tutti')); gulp.watch('src/scripts/tutti/**/*.js',['scripts_concat_tutti']);
gulp.watch('src/scripts/js/**/*.js',gulp.series(['scripts_browserify', 'scripts_concat_tutti']));
done();
}); });
// Erases all generated files in output directories. // Erases all generated files in output directories.
gulp.task('cleanup', function(done) { gulp.task('cleanup', function() {
let paths = []; var paths = [];
for (attr in destination) { for (attr in destination) {
paths.push(destination[attr]); paths.push(destination[attr]);
} }
@@ -215,20 +144,17 @@ gulp.task('cleanup', function(done) {
git.clean({ args: '-f -X ' + paths.join(' ') }, function (err) { git.clean({ args: '-f -X ' + paths.join(' ') }, function (err) {
if(err) throw err; if(err) throw err;
}); });
done();
}); });
// Run 'gulp' to build everything at once // Run 'gulp' to build everything at once
let tasks = []; var tasks = [];
if (enabled.cleanup) tasks.push('cleanup'); if (enabled.cleanup) tasks.push('cleanup');
// gulp.task('default', gulp.parallel('styles', 'templates', 'scripts', 'scripts_tutti')); gulp.task('default', tasks.concat([
gulp.task('default', gulp.parallel(tasks.concat([
'styles', 'styles',
'templates', 'templates',
'scripts', 'scripts',
'scripts_concat_tutti', 'scripts_concat_tutti',
'scripts_move_vendor', 'scripts_move_vendor',
'scripts_browserify', ]));
])));

View File

@@ -1,180 +0,0 @@
// For a detailed explanation regarding each configuration property, visit:
// https://jestjs.io/docs/en/configuration.html
module.exports = {
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after the first failure
// bail: false,
// Respect "browser" field in package.json when resolving modules
// browser: false,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/tmp/jest_rs",
// Automatically clear mock calls and instances between every test
clearMocks: true,
// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: false,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: null,
// The directory where Jest should output its coverage files
// coverageDirectory: null,
// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: null,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: null,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: null,
// A set of global variables that need to be available in all test environments
// globals: {},
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "json",
// "jsx",
// "node"
// ],
// A map from regular expressions to module names that allow to stub out resources with a single module
// moduleNameMapper: {},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "always",
// A preset that is used as a base for Jest's configuration
// preset: null,
// Run tests from one or more projects
// projects: null,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state between every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: null,
// Automatically restore mock state between every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: null,
// A list of paths to directories that Jest should use to search for files in
// roots: [
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
setupFiles: ["<rootDir>/src/scripts/js/es6/test_config/test-env.js"],
// The path to a module that runs some code to configure or set up the testing framework before each test
// setupTestFrameworkScriptFile: null,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
testEnvironment: "jsdom",
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
// testMatch: [
// "**/__tests__/**/*.js?(x)",
// "**/?(*.)+(spec|test).js?(x)"
// ],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [
// "/node_modules/"
// ],
// The regexp pattern Jest uses to detect test files
// testRegex: "",
// This option allows the use of a custom results processor
// testResultsProcessor: null,
// This option allows use of a custom test runner
// testRunner: "jasmine2",
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
// testURL: "http://localhost",
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
// timers: "real",
// A map from regular expressions to paths to transformers
// transform: null,
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "/node_modules/"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: null,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};

package-lock.json (generated): 10079 lines changed

File diff suppressed because it is too large.

View File

@@ -7,48 +7,26 @@
"url": "git://git.blender.org/pillar.git" "url": "git://git.blender.org/pillar.git"
}, },
"devDependencies": { "devDependencies": {
"@babel/core": "7.1.6", "gulp": "^3.9.1",
"@babel/preset-env": "7.1.6", "gulp-autoprefixer": "^6.0.0",
"acorn": "5.7.3", "gulp-cached": "^1.1.1",
"babel-core": "7.0.0-bridge.0", "gulp-chmod": "^2.0.0",
"babelify": "10.0.0", "gulp-concat": "^2.6.1",
"browserify": "16.2.3", "gulp-if": "^2.0.2",
"gulp": "4.0.0", "gulp-git": "^2.8.0",
"gulp-autoprefixer": "6.0.0", "gulp-livereload": "^4.0.0",
"gulp-babel": "8.0.0", "gulp-plumber": "^1.2.0",
"gulp-cached": "1.1.1", "gulp-pug": "^4.0.1",
"gulp-chmod": "2.0.0", "gulp-rename": "^1.4.0",
"gulp-concat": "2.6.1", "gulp-sass": "^4.0.1",
"gulp-git": "2.8.0", "gulp-sourcemaps": "^2.6.4",
"gulp-if": "2.0.2", "gulp-uglify-es": "^1.0.4",
"gulp-livereload": "4.0.0", "minimist": "^1.2.0"
"gulp-plumber": "1.2.0",
"gulp-pug": "4.0.1",
"gulp-rename": "1.4.0",
"gulp-sass": "4.0.1",
"gulp-sourcemaps": "2.6.4",
"gulp-uglify-es": "1.0.4",
"jest": "23.6.0",
"minimist": "1.2.0",
"vinyl-buffer": "1.0.1",
"vinyl-source-stream": "2.0.0"
}, },
"dependencies": { "dependencies": {
"bootstrap": "4.1.3", "bootstrap": "^4.1.3",
"glob": "7.1.3", "jquery": "^3.3.1",
"jquery": "3.3.1", "popper.js": "^1.14.4",
"natives": "^1.1.6", "video.js": "^7.2.2"
"popper.js": "1.14.4",
"video.js": "7.2.2",
"vue": "2.5.17"
},
"scripts": {
"test": "jest"
},
"__COMMENTS__": [
"natives@1.1.6 for Gulp 3.x on Node 10.x: https://github.com/gulpjs/gulp/issues/2162#issuecomment-385197164"
],
"resolutions": {
"natives": "1.1.6"
} }
} }

View File

@@ -712,10 +712,6 @@ class PillarServer(BlinkerCompatibleEve):
authentication.setup_app(self) authentication.setup_app(self)
# Register Flask Debug Toolbar (disabled by default).
from flask_debugtoolbar import DebugToolbarExtension
DebugToolbarExtension(self)
for ext in self.pillar_extensions.values(): for ext in self.pillar_extensions.values():
self.log.info('Setting up extension %s', ext.name) self.log.info('Setting up extension %s', ext.name)
ext.setup_app(self) ext.setup_app(self)
@@ -726,7 +722,6 @@ class PillarServer(BlinkerCompatibleEve):
self._config_user_caps() self._config_user_caps()
# Only enable this when debugging. # Only enable this when debugging.
# TODO(fsiddi): Consider removing this in favor of the routes tab in Flask Debug Toolbar.
# self._list_routes() # self._list_routes()
def setup_db_indices(self): def setup_db_indices(self):
@@ -790,7 +785,7 @@ class PillarServer(BlinkerCompatibleEve):
return 'basic ' + base64.b64encode('%s:%s' % (username, subclient_id)) return 'basic ' + base64.b64encode('%s:%s' % (username, subclient_id))
def post_internal(self, resource: str, payl=None, skip_validation=False): def post_internal(self, resource: str, payl=None, skip_validation=False):
"""Workaround for Eve issue https://github.com/pyeve/eve/issues/810""" """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.post import post_internal from eve.methods.post import post_internal
url = self.config['URLS'][resource] url = self.config['URLS'][resource]
@@ -800,7 +795,7 @@ class PillarServer(BlinkerCompatibleEve):
def put_internal(self, resource: str, payload=None, concurrency_check=False, def put_internal(self, resource: str, payload=None, concurrency_check=False,
skip_validation=False, **lookup): skip_validation=False, **lookup):
"""Workaround for Eve issue https://github.com/pyeve/eve/issues/810""" """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.put import put_internal from eve.methods.put import put_internal
url = self.config['URLS'][resource] url = self.config['URLS'][resource]
@@ -811,7 +806,7 @@ class PillarServer(BlinkerCompatibleEve):
def patch_internal(self, resource: str, payload=None, concurrency_check=False, def patch_internal(self, resource: str, payload=None, concurrency_check=False,
skip_validation=False, **lookup): skip_validation=False, **lookup):
"""Workaround for Eve issue https://github.com/pyeve/eve/issues/810""" """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.patch import patch_internal from eve.methods.patch import patch_internal
url = self.config['URLS'][resource] url = self.config['URLS'][resource]
@@ -822,7 +817,7 @@ class PillarServer(BlinkerCompatibleEve):
def delete_internal(self, resource: str, concurrency_check=False, def delete_internal(self, resource: str, concurrency_check=False,
suppress_callbacks=False, **lookup): suppress_callbacks=False, **lookup):
"""Workaround for Eve issue https://github.com/pyeve/eve/issues/810""" """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
from eve.methods.delete import deleteitem_internal from eve.methods.delete import deleteitem_internal
url = self.config['URLS'][resource] url = self.config['URLS'][resource]
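
All four *_internal wrappers above share one shape: look up the resource's URL fragment in Eve's configuration and delegate to the matching eve.methods function inside a faked request context. A minimal sketch of that pattern; only the config['URLS'] lookup and the delegation are visible in the hunk, so the request-context handling here is an assumption:

def post_internal(app, resource: str, payl=None, skip_validation=False):
    """Workaround for Eve issue https://github.com/pyeve/eve/issues/810."""
    from eve.methods.post import post_internal as eve_post_internal

    # Eve keeps each registered resource's URL fragment in app.config['URLS'].
    url = app.config['URLS'][resource]
    # Eve resolves the resource from the current request URL, so fake one.
    with app.test_request_context(path=f"/{app.config['URL_PREFIX']}/{url}", method='POST'):
        return eve_post_internal(resource, payl=payl, skip_validation=skip_validation)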

View File

@@ -1,6 +1,6 @@
def setup_app(app): def setup_app(app):
from . import encoding, blender_id, projects, local_auth, file_storage from . import encoding, blender_id, projects, local_auth, file_storage
from . import users, nodes, latest, blender_cloud, service, activities, timeline from . import users, nodes, latest, blender_cloud, service, activities
from . import organizations from . import organizations
from . import search from . import search
@@ -11,7 +11,6 @@ def setup_app(app):
local_auth.setup_app(app, url_prefix='/auth') local_auth.setup_app(app, url_prefix='/auth')
file_storage.setup_app(app, url_prefix='/storage') file_storage.setup_app(app, url_prefix='/storage')
latest.setup_app(app, url_prefix='/latest') latest.setup_app(app, url_prefix='/latest')
timeline.setup_app(app, url_prefix='/timeline')
blender_cloud.setup_app(app, url_prefix='/bcloud') blender_cloud.setup_app(app, url_prefix='/bcloud')
users.setup_app(app, api_prefix='/users') users.setup_app(app, api_prefix='/users')
service.setup_app(app, api_prefix='/service') service.setup_app(app, api_prefix='/service')
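
Each module wired up above follows the same convention: it exposes a setup_app(app, url_prefix=...) (or api_prefix=...) that registers its own Flask blueprint. A hedged sketch of that shape, using the timeline module removed in this hunk as the example; its real contents are not part of the diff:

from flask import Blueprint, jsonify

blueprint = Blueprint('timeline', __name__)

@blueprint.route('/')
def timeline_index():
    # Placeholder endpoint; the real module's routes are not shown in this commit.
    return jsonify(_items=[])

def setup_app(app, url_prefix: str = None):
    # With url_prefix='/timeline' the blueprint is mounted at /timeline/.
    app.register_blueprint(blueprint, url_prefix=url_prefix)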

View File

@@ -1,5 +1,4 @@
import logging import logging
from html.parser import HTMLParser
from flask import request, current_app from flask import request, current_app
from pillar.api.utils import gravatar from pillar.api.utils import gravatar
@@ -8,15 +7,6 @@ from pillar.auth import current_user
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class CommentHTMLParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.data = []
def handle_data(self, data):
self.data.append(data)
def notification_parse(notification): def notification_parse(notification):
activities_collection = current_app.data.driver.db['activities'] activities_collection = current_app.data.driver.db['activities']
activities_subscriptions_collection = \ activities_subscriptions_collection = \
@@ -40,14 +30,9 @@ def notification_parse(notification):
object_type = 'comment' object_type = 'comment'
object_name = '' object_name = ''
object_id = activity['object'] object_id = activity['object']
context_object_type = node['parent']['node_type']
# If node_type is 'dillo_post', just call it 'post'
node_type = 'post' if context_object_type.endswith('_post') else \
context_object_type
if node['parent']['user'] == current_user.user_id: if node['parent']['user'] == current_user.user_id:
owner = f"your {node_type}" owner = "your {0}".format(node['parent']['node_type'])
else: else:
parent_comment_user = users_collection.find_one( parent_comment_user = users_collection.find_one(
{'_id': node['parent']['user']}) {'_id': node['parent']['user']})
@@ -55,22 +40,10 @@ def notification_parse(notification):
user_name = 'their' user_name = 'their'
else: else:
user_name = "{0}'s".format(parent_comment_user['username']) user_name = "{0}'s".format(parent_comment_user['username'])
owner = "{0} {1}".format(user_name, node['parent']['node_type'])
owner = f"{user_name} {node_type}" context_object_type = node['parent']['node_type']
context_object_name = owner
context_object_name = f"{node['parent']['name'][:50]}..."
if context_object_type == 'comment':
# Parse the comment content, which might be HTML and extract
# some text from it.
parser = CommentHTMLParser()
# Trim the comment content to 50 chars, the parser will handle it
parser.feed(node['properties']['content'][:50])
try:
comment_content = parser.data[0]
except KeyError:
comment_content = '...'
# Trim the parsed text down to 15 charss
context_object_name = f"{comment_content[:50]}..."
context_object_id = activity['context_object'] context_object_id = activity['context_object']
if activity['verb'] == 'replied': if activity['verb'] == 'replied':
action = 'replied to' action = 'replied to'
@@ -79,15 +52,13 @@ def notification_parse(notification):
else: else:
action = activity['verb'] action = activity['verb']
action = f'{action} {owner}'
lookup = { lookup = {
'user': current_user.user_id, 'user': current_user.user_id,
'context_object_type': 'node', 'context_object_type': 'node',
'context_object': context_object_id, 'context_object': context_object_id,
} }
subscription = activities_subscriptions_collection.find_one(lookup) subscription = activities_subscriptions_collection.find_one(lookup)
if subscription and subscription['notifications']['web'] is True: if subscription and subscription['notifications']['web'] == True:
is_subscribed = True is_subscribed = True
else: else:
is_subscribed = False is_subscribed = False
@@ -148,8 +119,6 @@ def activity_subscribe(user_id, context_object_type, context_object_id):
# If no subscription exists, we create one # If no subscription exists, we create one
if not subscription: if not subscription:
# Workaround for issue: https://github.com/pyeve/eve/issues/1174
lookup['notifications'] = {}
current_app.post_internal('activities-subscriptions', lookup) current_app.post_internal('activities-subscriptions', lookup)
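
Condensed, the subscription check in this hunk is one MongoDB lookup plus a flag test. A sketch, assuming db is the pymongo database reached via current_app.data.driver.db; the removed lines above additionally pre-fill lookup['notifications'] = {} before calling post_internal, as a workaround for Eve issue 1174:

def is_subscribed(db, user_id, context_object_id) -> bool:
    lookup = {
        'user': user_id,
        'context_object_type': 'node',
        'context_object': context_object_id,
    }
    subscription = db['activities-subscriptions'].find_one(lookup)
    # Only an explicit web-notification flag counts as subscribed.
    return bool(subscription and subscription['notifications']['web'])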

View File

@@ -1,3 +1,4 @@
import copy
from datetime import datetime from datetime import datetime
import logging import logging
@@ -5,12 +6,36 @@ from bson import ObjectId, tz_util
from eve.io.mongo import Validator from eve.io.mongo import Validator
from flask import current_app from flask import current_app
from pillar import markdown import pillar.markdown
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class ValidateCustomFields(Validator): class ValidateCustomFields(Validator):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Will be reference to the actual document being validated, so that we can
# modify it during validation.
self.__real_document = None
def validate(self, document, *args, **kwargs):
# Keep a reference to the actual document, because Cerberus validates copies.
self.__real_document = document
result = super().validate(document, *args, **kwargs)
# Store the in-place modified document as self.document, so that Eve's post_internal
# can actually pick it up as the validated document. We need to make a copy so that
# further modifications (like setting '_etag' etc.) aren't done in-place.
self.document = copy.deepcopy(document)
return result
def _get_child_validator(self, *args, **kwargs):
child = super()._get_child_validator(*args, **kwargs)
# Pass along our reference to the actual document.
child.__real_document = self.__real_document
return child
# TODO: split this into a convert_property(property, schema) and call that from this function. # TODO: split this into a convert_property(property, schema) and call that from this function.
def convert_properties(self, properties, node_schema): def convert_properties(self, properties, node_schema):
@@ -112,7 +137,8 @@ class ValidateCustomFields(Validator):
if val: if val:
# This ensures the modifications made by v's coercion rules are # This ensures the modifications made by v's coercion rules are
# visible to this validator's output. # visible to this validator's output.
self.document[field] = v.document # TODO(fsiddi): this no longer works due to Cerberus internal changes.
# self.current[field] = v.current
return True return True
log.warning('Error validating properties for node %s: %s', self.document, v.errors) log.warning('Error validating properties for node %s: %s', self.document, v.errors)
@@ -157,19 +183,36 @@ class ValidateCustomFields(Validator):
if ip.prefixlen() == 0: if ip.prefixlen() == 0:
self._error(field_name, 'Zero-length prefix is not allowed') self._error(field_name, 'Zero-length prefix is not allowed')
def _normalize_coerce_markdown(self, markdown_field: str) -> str:
    """
    Cache markdown as html.

    :param markdown_field: name of the field containing Markdown
    :return: html string
    """
    my_log = log.getChild('_normalize_coerce_markdown')
    mdown = self.document.get(markdown_field, '')
    html = markdown.markdown(mdown)
    my_log.debug('Generated html for markdown field %s in doc with id %s',
                 markdown_field, id(self.document))
    return html

def _validator_markdown(self, field, value):
    """Convert MarkDown."""
    my_log = log.getChild('_validator_markdown')

    # Find this field inside the original document
    my_subdoc = self._subdoc_in_real_document()
    if my_subdoc is None:
        # If self.update==True we are validating an update document, which
        # may not contain all fields, so then a missing field is fine.
        if not self.update:
            self._error(field, f'validator_markdown: unable to find sub-document '
                               f'for path {self.document_path}')
        return

    my_log.debug('validating field %r with value %r', field, value)
    save_to = pillar.markdown.cache_field_name(field)
    html = pillar.markdown.markdown(value)
    my_log.debug('saving result to %r in doc with id %s', save_to, id(my_subdoc))
    my_subdoc[save_to] = html

def _subdoc_in_real_document(self):
    """Return a reference to the current sub-document inside the real document.

    This allows modification of the document being validated.
    """
    my_subdoc = getattr(self, 'persisted_document') or self.__real_document
    for item in self.document_path:
        my_subdoc = my_subdoc[item]
    return my_subdoc
if __name__ == '__main__': if __name__ == '__main__':
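
Both sides of this hunk cache rendered Markdown in a companion field: one via a Cerberus coerce rule fed by the cache field's default value, the other via a custom validator writing into the real document. A self-contained sketch of the coerce-based variant; the PyPI markdown package stands in for pillar.markdown, cache_field_name mirrors the `_<field>_html` naming visible in the schema hunks, and the default-feeds-coercion ordering is as documented in markdown_fields() later in this commit:

import markdown as md
from cerberus import Validator

def cache_field_name(field: str) -> str:
    # '_content_html' for 'content', matching the schemas in this commit.
    return f'_{field}_html'

class MarkdownCachingValidator(Validator):
    def _normalize_coerce_markdown(self, markdown_field: str) -> str:
        # The cache field's *default* is the name of the Markdown field,
        # so the coercion receives that name and returns the rendered HTML.
        return md.markdown(self.document.get(markdown_field, ''))

schema = {
    'content': {'type': 'string'},
    cache_field_name('content'): {
        'type': 'string',
        'default': 'content',
        'coerce': 'markdown',
    },
}
doc = MarkdownCachingValidator(schema).normalized({'content': '*hi*'})
# doc['_content_html'] == '<p><em>hi</em></p>'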

View File

@@ -1,8 +1,5 @@
import os import os
from pillar.api.node_types.utils import markdown_fields
STORAGE_BACKENDS = ["local", "pillar", "cdnsun", "gcs", "unittest"]
URL_PREFIX = 'api' URL_PREFIX = 'api'
# Enable reads (GET), inserts (POST) and DELETE for resources/collections # Enable reads (GET), inserts (POST) and DELETE for resources/collections
@@ -186,7 +183,12 @@ organizations_schema = {
'maxlength': 128, 'maxlength': 128,
'required': True 'required': True
}, },
**markdown_fields('description', maxlength=256), 'description': {
'type': 'string',
'maxlength': 256,
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
'website': { 'website': {
'type': 'string', 'type': 'string',
'maxlength': 256, 'maxlength': 256,
@@ -319,7 +321,11 @@ nodes_schema = {
'maxlength': 128, 'maxlength': 128,
'required': True, 'required': True,
}, },
**markdown_fields('description'), 'description': {
'type': 'string',
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
'picture': _file_embedded_schema, 'picture': _file_embedded_schema,
'order': { 'order': {
'type': 'integer', 'type': 'integer',
@@ -457,7 +463,7 @@ files_schema = {
'backend': { 'backend': {
'type': 'string', 'type': 'string',
'required': True, 'required': True,
'allowed': STORAGE_BACKENDS, 'allowed': ["local", "pillar", "cdnsun", "gcs", "unittest"]
}, },
# Where the file is in the backend storage itself. In the case of GCS, # Where the file is in the backend storage itself. In the case of GCS,
@@ -569,7 +575,11 @@ projects_schema = {
'maxlength': 128, 'maxlength': 128,
'required': True, 'required': True,
}, },
**markdown_fields('description'), 'description': {
'type': 'string',
'validator': 'markdown',
},
'_description_html': {'type': 'string'},
# Short summary for the project # Short summary for the project
'summary': { 'summary': {
'type': 'string', 'type': 'string',
@@ -579,8 +589,6 @@ projects_schema = {
'picture_square': _file_embedded_schema, 'picture_square': _file_embedded_schema,
# Header # Header
'picture_header': _file_embedded_schema, 'picture_header': _file_embedded_schema,
# Picture with a 16:9 aspect ratio (for Open Graph)
'picture_16_9': _file_embedded_schema,
'header_node': dict( 'header_node': dict(
nullable=True, nullable=True,
**_node_embedded_schema **_node_embedded_schema

View File

@@ -5,7 +5,6 @@ import mimetypes
import os import os
import pathlib import pathlib
import tempfile import tempfile
import time
import typing import typing
import uuid import uuid
from hashlib import md5 from hashlib import md5
@@ -131,67 +130,6 @@ def _process_image(bucket: Bucket,
src_file['status'] = 'complete' src_file['status'] = 'complete'
def _video_duration_seconds(filename: pathlib.Path) -> typing.Optional[int]:
"""Get the duration of a video file using ffprobe
https://superuser.com/questions/650291/how-to-get-video-duration-in-seconds
:param filename: file path to video
:return: video duration in seconds
"""
import subprocess
def run(cli_args):
if log.isEnabledFor(logging.INFO):
import shlex
cmd = ' '.join(shlex.quote(s) for s in cli_args)
log.info('Calling %s', cmd)
ffprobe = subprocess.run(
cli_args,
stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
timeout=10, # seconds
)
if ffprobe.returncode:
import shlex
cmd = ' '.join(shlex.quote(s) for s in cli_args)
log.error('Error running %s: stopped with return code %i',
cmd, ffprobe.returncode)
log.error('Output was: %s', ffprobe.stdout)
return None
try:
return int(float(ffprobe.stdout))
except ValueError as e:
log.exception('ffprobe produced invalid number: %s', ffprobe.stdout)
return None
ffprobe_from_container_args = [
current_app.config['BIN_FFPROBE'],
'-v', 'error',
'-show_entries', 'format=duration',
'-of', 'default=noprint_wrappers=1:nokey=1',
str(filename),
]
ffprobe_from_stream_args = [
current_app.config['BIN_FFPROBE'],
'-v', 'error',
'-hide_banner',
'-select_streams', 'v:0', # we only care about the first video stream
'-show_entries', 'stream=duration',
'-of', 'default=noprint_wrappers=1:nokey=1',
str(filename),
]
duration = run(ffprobe_from_stream_args) or \
run(ffprobe_from_container_args) or \
None
return duration
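
For reference, both ffprobe invocations above print a bare duration on stdout (e.g. `185.064000`), which is why the helper parses with int(float(ffprobe.stdout)). A tiny sketch of just that parse, with sample output assumed:

import typing

def parse_ffprobe_duration(stdout: bytes) -> typing.Optional[int]:
    try:
        # ffprobe reports fractional seconds; only whole seconds are stored.
        return int(float(stdout))
    except ValueError:
        return None

assert parse_ffprobe_duration(b'185.064000\n') == 185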
def _video_size_pixels(filename: pathlib.Path) -> typing.Tuple[int, int]: def _video_size_pixels(filename: pathlib.Path) -> typing.Tuple[int, int]:
"""Figures out the size (in pixels) of the video file. """Figures out the size (in pixels) of the video file.
@@ -282,10 +220,8 @@ def _process_video(gcs,
# by determining the video size here we already have this information in the file # by determining the video size here we already have this information in the file
# document before Zencoder calls our notification URL. It also opens up possibilities # document before Zencoder calls our notification URL. It also opens up possibilities
# for other encoding backends that don't support this functionality. # for other encoding backends that don't support this functionality.
video_path = pathlib.Path(local_file.name) video_width, video_height = _video_size_pixels(pathlib.Path(local_file.name))
video_width, video_height = _video_size_pixels(video_path)
capped_video_width, capped_video_height = _video_cap_at_1080(video_width, video_height) capped_video_width, capped_video_height = _video_cap_at_1080(video_width, video_height)
video_duration = _video_duration_seconds(video_path)
# Create variations # Create variations
root, _ = os.path.splitext(src_file['file_path']) root, _ = os.path.splitext(src_file['file_path'])
@@ -298,13 +234,12 @@ def _process_video(gcs,
content_type='video/{}'.format(v), content_type='video/{}'.format(v),
file_path='{}-{}.{}'.format(root, v, v), file_path='{}-{}.{}'.format(root, v, v),
size='', size='',
duration=0,
width=capped_video_width, width=capped_video_width,
height=capped_video_height, height=capped_video_height,
length=0, length=0,
md5='', md5='',
) )
if video_duration:
file_variation['duration'] = video_duration
# Append file variation. Originally mp4 and webm were the available options, # Append file variation. Originally mp4 and webm were the available options,
# that's why we build a list. # that's why we build a list.
src_file['variations'].append(file_variation) src_file['variations'].append(file_variation)
@@ -610,7 +545,6 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
import gcloud.exceptions import gcloud.exceptions
my_log = log.getChild(f'refresh_links_for_backend.{backend_name}') my_log = log.getChild(f'refresh_links_for_backend.{backend_name}')
start_time = time.time()
# Retrieve expired links. # Retrieve expired links.
files_collection = current_app.data.driver.db['files'] files_collection = current_app.data.driver.db['files']
@@ -634,10 +568,10 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
return return
if 0 < chunk_size == document_count: if 0 < chunk_size == document_count:
my_log.info('Found %d documents to refresh, probably limited by the chunk size %d', my_log.info('Found %d documents to refresh, probably limited by the chunk size.',
document_count, chunk_size) document_count)
else: else:
my_log.info('Found %d documents to refresh, chunk size=%d', document_count, chunk_size) my_log.info('Found %d documents to refresh.', document_count)
refreshed = 0 refreshed = 0
report_chunks = min(max(5, document_count // 25), 100) report_chunks = min(max(5, document_count // 25), 100)
@@ -681,10 +615,8 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
'links', refreshed) 'links', refreshed)
return return
if refreshed % report_chunks != 0:
my_log.info('Refreshed %i links', refreshed) my_log.info('Refreshed %i links', refreshed)
my_log.info('Refresh took %s', datetime.timedelta(seconds=time.time() - start_time))
@require_login() @require_login()
def create_file_doc(name, filename, content_type, length, project, def create_file_doc(name, filename, content_type, length, project,

View File

@@ -90,11 +90,12 @@ class Blob(metaclass=abc.ABCMeta):
def __init__(self, name: str, bucket: Bucket) -> None: def __init__(self, name: str, bucket: Bucket) -> None:
self.name = name self.name = name
"""Name of this blob in the bucket."""
self.bucket = bucket self.bucket = bucket
self._size_in_bytes: typing.Optional[int] = None self._size_in_bytes: typing.Optional[int] = None
self.filename: str = None
"""Name of the file for the Content-Disposition header when downloading it."""
self._log = logging.getLogger(f'{__name__}.Blob') self._log = logging.getLogger(f'{__name__}.Blob')
def __repr__(self): def __repr__(self):
@@ -132,19 +133,12 @@ class Blob(metaclass=abc.ABCMeta):
file_size=file_size) file_size=file_size)
@abc.abstractmethod @abc.abstractmethod
def update_filename(self, filename: str, *, is_attachment=True): def update_filename(self, filename: str):
"""Sets the filename which is used when downloading the file. """Sets the filename which is used when downloading the file.
Not all storage backends support this, and will use the on-disk filename instead. Not all storage backends support this, and will use the on-disk filename instead.
""" """
@abc.abstractmethod
def update_content_type(self, content_type: str, content_encoding: str = ''):
"""Set the content type (and optionally content encoding).
Not all storage backends support this.
"""
@abc.abstractmethod @abc.abstractmethod
def get_url(self, *, is_public: bool) -> str: def get_url(self, *, is_public: bool) -> str:
"""Returns the URL to access this blob. """Returns the URL to access this blob.

View File

@@ -174,7 +174,7 @@ class GoogleCloudStorageBlob(Blob):
self.gblob.reload() self.gblob.reload()
self._size_in_bytes = self.gblob.size self._size_in_bytes = self.gblob.size
def update_filename(self, filename: str, *, is_attachment=True): def update_filename(self, filename: str):
"""Set the ContentDisposition metadata so that when a file is downloaded """Set the ContentDisposition metadata so that when a file is downloaded
it has a human-readable name. it has a human-readable name.
""" """
@@ -182,17 +182,7 @@ class GoogleCloudStorageBlob(Blob):
if '"' in filename: if '"' in filename:
raise ValueError(f'Filename is not allowed to have double quote in it: {filename!r}') raise ValueError(f'Filename is not allowed to have double quote in it: {filename!r}')
if is_attachment:
self.gblob.content_disposition = f'attachment; filename="{filename}"' self.gblob.content_disposition = f'attachment; filename="{filename}"'
else:
self.gblob.content_disposition = f'filename="{filename}"'
self.gblob.patch()
def update_content_type(self, content_type: str, content_encoding: str = ''):
"""Set the content type (and optionally content encoding)."""
self.gblob.content_type = content_type
self.gblob.content_encoding = content_encoding
self.gblob.patch() self.gblob.patch()
def get_url(self, *, is_public: bool) -> str: def get_url(self, *, is_public: bool) -> str:
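
The two branches of update_filename above emit standard Content-Disposition values. A sketch of the formatting rule as a standalone helper; the double-quote guard mirrors the method above:

def content_disposition(filename: str, is_attachment: bool = True) -> str:
    if '"' in filename:
        raise ValueError(f'Filename is not allowed to have double quote in it: {filename!r}')
    if is_attachment:
        # Forces a download dialog with this filename.
        return f'attachment; filename="{filename}"'
    # Lets the browser display the blob inline but keeps the save-as name.
    return f'filename="{filename}"'

# content_disposition('endscreen.mp4') == 'attachment; filename="endscreen.mp4"'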

View File

@@ -113,13 +113,10 @@ class LocalBlob(Blob):
self._size_in_bytes = file_size self._size_in_bytes = file_size
def update_filename(self, filename: str, *, is_attachment=True): def update_filename(self, filename: str):
# TODO: implement this for local storage. # TODO: implement this for local storage.
self._log.info('update_filename(%r) not supported', filename) self._log.info('update_filename(%r) not supported', filename)
def update_content_type(self, content_type: str, content_encoding: str = ''):
self._log.info('update_content_type(%r, %r) not supported', content_type, content_encoding)
def make_public(self): def make_public(self):
# No-op on this storage backend. # No-op on this storage backend.
pass pass

View File

@@ -29,6 +29,7 @@ def latest_nodes(db_filter, projection, limit):
proj = { proj = {
'_created': 1, '_created': 1,
'_updated': 1, '_updated': 1,
'user.full_name': 1,
'project._id': 1, 'project._id': 1,
'project.url': 1, 'project.url': 1,
'project.name': 1, 'project.name': 1,
@@ -69,7 +70,6 @@ def latest_assets():
{'name': 1, 'node_type': 1, {'name': 1, 'node_type': 1,
'parent': 1, 'picture': 1, 'properties.status': 1, 'parent': 1, 'picture': 1, 'properties.status': 1,
'properties.content_type': 1, 'properties.content_type': 1,
'properties.duration_seconds': 1,
'permissions.world': 1}, 'permissions.world': 1},
12) 12)
@@ -80,7 +80,7 @@ def latest_assets():
def latest_comments(): def latest_comments():
latest = latest_nodes({'node_type': 'comment', latest = latest_nodes({'node_type': 'comment',
'properties.status': 'published'}, 'properties.status': 'published'},
{'parent': 1, 'user.full_name': 1, {'parent': 1,
'properties.content': 1, 'node_type': 1, 'properties.content': 1, 'node_type': 1,
'properties.status': 1, 'properties.status': 1,
'properties.is_reply': 1}, 'properties.is_reply': 1},

View File

@@ -23,6 +23,14 @@ attachments_embedded_schema = {
'type': 'objectid', 'type': 'objectid',
'required': True, 'required': True,
}, },
'link': {
'type': 'string',
'allowed': ['self', 'none', 'custom'],
'default': 'self',
},
'link_custom': {
'type': 'string',
},
'collection': { 'collection': {
'type': 'string', 'type': 'string',
'allowed': ['files'], 'allowed': ['files'],

View File

@@ -24,10 +24,6 @@ node_type_asset = {
'content_type': { 'content_type': {
'type': 'string' 'type': 'string'
}, },
# The duration of a video asset in seconds.
'duration_seconds': {
'type': 'integer'
},
# We point to the original file (and use it to extract any relevant # We point to the original file (and use it to extract any relevant
# variation useful for our scope). # variation useful for our scope).
'file': _file_embedded_schema, 'file': _file_embedded_schema,
@@ -62,7 +58,6 @@ node_type_asset = {
}, },
'form_schema': { 'form_schema': {
'content_type': {'visible': False}, 'content_type': {'visible': False},
'duration_seconds': {'visible': False},
'order': {'visible': False}, 'order': {'visible': False},
'tags': {'visible': False}, 'tags': {'visible': False},
'categories': {'visible': False}, 'categories': {'visible': False},

View File

@@ -1,15 +1,15 @@
from pillar.api.node_types import attachments_embedded_schema
from pillar.api.node_types.utils import markdown_fields
node_type_comment = { node_type_comment = {
'name': 'comment', 'name': 'comment',
'description': 'Comments for asset nodes, pages, etc.', 'description': 'Comments for asset nodes, pages, etc.',
'dyn_schema': { 'dyn_schema': {
# The actual comment content # The actual comment content
**markdown_fields( 'content': {
'content', 'type': 'string',
minlength=5, 'minlength': 5,
required=True), 'required': True,
'validator': 'markdown',
},
'_content_html': {'type': 'string'},
'status': { 'status': {
'type': 'string', 'type': 'string',
'allowed': [ 'allowed': [
@@ -51,8 +51,7 @@ node_type_comment = {
} }
}, },
'confidence': {'type': 'float'}, 'confidence': {'type': 'float'},
'is_reply': {'type': 'boolean'}, 'is_reply': {'type': 'boolean'}
'attachments': attachments_embedded_schema,
}, },
'form_schema': {}, 'form_schema': {},
'parent': ['asset', 'comment'], 'parent': ['asset', 'comment'],

View File

@@ -1,14 +1,17 @@
from pillar.api.node_types import attachments_embedded_schema from pillar.api.node_types import attachments_embedded_schema
from pillar.api.node_types.utils import markdown_fields
node_type_post = { node_type_post = {
'name': 'post', 'name': 'post',
'description': 'A blog post, for any project', 'description': 'A blog post, for any project',
'dyn_schema': { 'dyn_schema': {
**markdown_fields('content', 'content': {
minlength=5, 'type': 'string',
maxlength=90000, 'minlength': 5,
required=True), 'maxlength': 90000,
'required': True,
'validator': 'markdown',
},
'_content_html': {'type': 'string'},
'status': { 'status': {
'type': 'string', 'type': 'string',
'allowed': [ 'allowed': [

View File

@@ -1,34 +0,0 @@
from pillar import markdown
def markdown_fields(field: str, **kwargs) -> dict:
"""
Creates a field for the markdown, and a field for the cached html.
Example usage:
schema = {'myDoc': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
**markdown_fields('content', required=True),
}
},
}}
:param field:
:return:
"""
cache_field = markdown.cache_field_name(field)
return {
field: {
'type': 'string',
**kwargs
},
cache_field: {
'type': 'string',
'readonly': True,
'default': field, # Name of the field containing the markdown. Will be input to the coerce function.
'coerce': 'markdown',
}
}
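
Expanded, one markdown_fields() call produces exactly the pair of schema entries that the eve_settings hunk earlier in this commit spells out by hand. For example, assuming cache_field_name('description') yields '_description_html', as the schemas suggest:

# markdown_fields('description', maxlength=256) evaluates to:
expansion = {
    'description': {
        'type': 'string',
        'maxlength': 256,
    },
    '_description_html': {
        'type': 'string',
        'readonly': True,
        'default': 'description',  # source field name, fed to the 'markdown' coercion
        'coerce': 'markdown',
    },
}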

View File

@@ -1,20 +1,58 @@
import base64 import base64
import datetime import functools
import logging import logging
import typing
import urllib.parse
import pymongo.errors import pymongo.errors
import werkzeug.exceptions as wz_exceptions import werkzeug.exceptions as wz_exceptions
from bson import ObjectId
from flask import current_app, Blueprint, request from flask import current_app, Blueprint, request
from pillar.api.nodes import eve_hooks, comments, activities import pillar.markdown
from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
from pillar.api.file_storage_backends.gcs import update_file_name
from pillar.api.utils import str2id, jsonify from pillar.api.utils import str2id, jsonify
from pillar.api.utils.authorization import check_permissions, require_login from pillar.api.utils.authorization import check_permissions, require_login
from pillar.web.utils import pretty_date
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
blueprint = Blueprint('nodes_api', __name__) blueprint = Blueprint('nodes_api', __name__)
# TODO(fsiddi) Propose changes to make commenting roles a configuration value. ROLES_FOR_SHARING = {'subscriber', 'demo'}
ROLES_FOR_SHARING = ROLES_FOR_COMMENTING = set()
def only_for_node_type_decorator(*required_node_type_names):
"""Returns a decorator that checks its first argument's node type.
If the node type is not of the required node type, returns None,
otherwise calls the wrapped function.
>>> deco = only_for_node_type_decorator('comment')
>>> @deco
... def handle_comment(node): pass
>>> deco = only_for_node_type_decorator('comment', 'post')
>>> @deco
... def handle_comment_or_post(node): pass
"""
# Convert to a set for efficient 'x in required_node_type_names' queries.
required_node_type_names = set(required_node_type_names)
def only_for_node_type(wrapped):
@functools.wraps(wrapped)
def wrapper(node, *args, **kwargs):
if node.get('node_type') not in required_node_type_names:
return
return wrapped(node, *args, **kwargs)
return wrapper
only_for_node_type.__doc__ = "Decorator, immediately returns when " \
"the first argument is not of type %s." % required_node_type_names
return only_for_node_type
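
A usage sketch for the decorator above, mirroring its doctest: handlers simply return None when handed a node of the wrong type.

deco = only_for_node_type_decorator('comment')

@deco
def handle_comment(node):
    return node['_id']

assert handle_comment({'node_type': 'comment', '_id': 1}) == 1
assert handle_comment({'node_type': 'asset', '_id': 2}) is None  # silently skipped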
@blueprint.route('/<node_id>/share', methods=['GET', 'POST']) @blueprint.route('/<node_id>/share', methods=['GET', 'POST'])
@@ -49,86 +87,20 @@ def share_node(node_id):
else: else:
return '', 204 return '', 204
return jsonify(eve_hooks.short_link_info(short_code), status=status) return jsonify(short_link_info(short_code), status=status)
@blueprint.route('/<string(length=24):node_path>/comments', methods=['GET'])
def get_node_comments(node_path: str):
node_id = str2id(node_path)
return comments.get_node_comments(node_id)
@blueprint.route('/<string(length=24):node_path>/comments', methods=['POST'])
@require_login(require_roles=ROLES_FOR_COMMENTING)
def post_node_comment(node_path: str):
node_id = str2id(node_path)
msg = request.json['msg']
attachments = request.json.get('attachments', {})
return comments.post_node_comment(node_id, msg, attachments)
@blueprint.route('/<string(length=24):node_path>/comments/<string(length=24):comment_path>', methods=['PATCH'])
@require_login(require_roles=ROLES_FOR_COMMENTING)
def patch_node_comment(node_path: str, comment_path: str):
node_id = str2id(node_path)
comment_id = str2id(comment_path)
msg = request.json['msg']
attachments = request.json.get('attachments', {})
return comments.patch_node_comment(node_id, comment_id, msg, attachments)
@blueprint.route('/<string(length=24):node_path>/comments/<string(length=24):comment_path>/vote', methods=['POST'])
@require_login(require_roles=ROLES_FOR_COMMENTING)
def post_node_comment_vote(node_path: str, comment_path: str):
node_id = str2id(node_path)
comment_id = str2id(comment_path)
vote_str = request.json['vote']
vote = int(vote_str)
return comments.post_node_comment_vote(node_id, comment_id, vote)
@blueprint.route('/<string(length=24):node_path>/activities', methods=['GET'])
def activities_for_node(node_path: str):
node_id = str2id(node_path)
return jsonify(activities.for_node(node_id))
@blueprint.route('/tagged/') @blueprint.route('/tagged/')
@blueprint.route('/tagged/<tag>') @blueprint.route('/tagged/<tag>')
def tagged(tag=''): def tagged(tag=''):
"""Return all tagged nodes of public projects as JSON.""" """Return all tagged nodes of public projects as JSON."""
from pillar.auth import current_user
# We explicitly register the tagless endpoint to raise a 404, otherwise the PATCH # We explicitly register the tagless endpoint to raise a 404, otherwise the PATCH
# handler on /api/nodes/<node_id> will return a 405 Method Not Allowed. # handler on /api/nodes/<node_id> will return a 405 Method Not Allowed.
if not tag: if not tag:
raise wz_exceptions.NotFound() raise wz_exceptions.NotFound()
# Build the (cached) list of tagged nodes return _tagged(tag)
agg_list = _tagged(tag)
for node in agg_list:
if node['properties'].get('duration_seconds'):
node['properties']['duration'] = datetime.timedelta(seconds=node['properties']['duration_seconds'])
if node.get('_created') is not None:
node['pretty_created'] = pretty_date(node['_created'])
# If the user is anonymous, no more information is needed and we return
if current_user.is_anonymous:
return jsonify(agg_list)
# If the user is authenticated, attach view_progress for video assets
view_progress = current_user.nodes['view_progress']
for node in agg_list:
node_id = str(node['_id'])
# View progress should be added only for nodes of type 'asset' and
# with content_type 'video', only if the video was already in the watched
# list for the current user.
if node_id in view_progress:
node['view_progress'] = view_progress[node_id]
return jsonify(agg_list)
def _tagged(tag: str): def _tagged(tag: str):
@@ -150,20 +122,14 @@ def _tagged(tag: str):
'foreignField': '_id', 'foreignField': '_id',
'as': '_project', 'as': '_project',
}}, }},
{'$unwind': '$_project'},
{'$match': {'_project.is_private': False}}, {'$match': {'_project.is_private': False}},
{'$addFields': {
'project._id': '$_project._id',
'project.name': '$_project.name',
'project.url': '$_project.url',
}},
# Don't return the entire project/file for each node. # Don't return the entire project for each node.
{'$project': {'_project': False}}, {'$project': {'_project': False}},
{'$sort': {'_created': -1}} {'$sort': {'_created': -1}}
]) ])
return jsonify(list(agg))
return list(agg)
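
Schematically, the $lookup/$unwind/$addFields stages above replace each node's project id with an embedded summary before the full _project document is projected away. Sample values assumed:

# before the pipeline:
node_in = {'_id': 'n1', 'project': 'p1-object-id', 'properties': {'tags': ['animation']}}
# after $lookup + $unwind + $addFields + $project:
node_out = {
    '_id': 'n1',
    'project': {'_id': 'p1-object-id', 'name': 'Some Project', 'url': 'some-project'},
    'properties': {'tags': ['animation']},
}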
def generate_and_store_short_code(node): def generate_and_store_short_code(node):
@@ -241,6 +207,283 @@ def create_short_code(node) -> str:
return short_code return short_code
def short_link_info(short_code):
"""Returns the short link info in a dict."""
short_link = urllib.parse.urljoin(
current_app.config['SHORT_LINK_BASE_URL'], short_code)
return {
'short_code': short_code,
'short_link': short_link,
}
def before_replacing_node(item, original):
check_permissions('nodes', original, 'PUT')
update_file_name(item)
def after_replacing_node(item, original):
"""Push an update to the Algolia index when a node item is updated. If the
project is private, prevent public indexing.
"""
from pillar.celery import search_index_tasks as index
projects_collection = current_app.data.driver.db['projects']
project = projects_collection.find_one({'_id': item['project']})
if project.get('is_private', False):
# Skip index updating and return
return
status = item['properties'].get('status', 'unpublished')
node_id = str(item['_id'])
if status == 'published':
index.node_save.delay(node_id)
else:
index.node_delete.delay(node_id)
def before_inserting_nodes(items):
"""Before inserting a node in the collection we check if the user is allowed
and we append the project id to it.
"""
from pillar.auth import current_user
nodes_collection = current_app.data.driver.db['nodes']
def find_parent_project(node):
"""Recursive function that finds the ultimate parent of a node."""
if node and 'parent' in node:
parent = nodes_collection.find_one({'_id': node['parent']})
return find_parent_project(parent)
if node:
return node
else:
return None
for item in items:
check_permissions('nodes', item, 'POST')
if 'parent' in item and 'project' not in item:
parent = nodes_collection.find_one({'_id': item['parent']})
project = find_parent_project(parent)
if project:
item['project'] = project['_id']
# Default the 'user' property to the current user.
item.setdefault('user', current_user.user_id)
def after_inserting_nodes(items):
for item in items:
# Skip subscriptions for first level items (since the context is not a
# node, but a project).
# TODO: support should be added for mixed context
if 'parent' not in item:
return
context_object_id = item['parent']
if item['node_type'] == 'comment':
nodes_collection = current_app.data.driver.db['nodes']
parent = nodes_collection.find_one({'_id': item['parent']})
# Always subscribe to the parent node
activity_subscribe(item['user'], 'node', item['parent'])
if parent['node_type'] == 'comment':
# If the parent is a comment, we provide its own parent as
# context. We do this in order to point the user to an asset
# or group when viewing the notification.
verb = 'replied'
context_object_id = parent['parent']
# Subscribe to the parent of the parent comment (post or group)
activity_subscribe(item['user'], 'node', parent['parent'])
else:
activity_subscribe(item['user'], 'node', item['_id'])
verb = 'commented'
elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
verb = 'posted'
activity_subscribe(item['user'], 'node', item['_id'])
else:
# Don't automatically create activities for non-Pillar node types,
# as we don't know what would be a suitable verb (among other things).
continue
activity_object_add(
item['user'],
verb,
'node',
item['_id'],
'node',
context_object_id
)
def deduct_content_type(node_doc, original=None):
"""Deduct the content type from the attached file, if any."""
if node_doc['node_type'] != 'asset':
log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
return
node_id = node_doc.get('_id')
try:
file_id = ObjectId(node_doc['properties']['file'])
except KeyError:
if node_id is None:
# Creation of a file-less node is allowed, but updates aren't.
return
log.warning('deduct_content_type: Asset without properties.file, rejecting.')
raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')
files = current_app.data.driver.db['files']
file_doc = files.find_one({'_id': file_id},
{'content_type': 1})
if not file_doc:
log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
node_id, file_id)
raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')
# Guess the node content type from the file content type
file_type = file_doc['content_type']
if file_type.startswith('video/'):
content_type = 'video'
elif file_type.startswith('image/'):
content_type = 'image'
else:
content_type = 'file'
node_doc['properties']['content_type'] = content_type
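Note: the MIME-prefix checks above boil down to a small pure function; a sketch:

    def guess_content_type(mime: str) -> str:
        # Mirrors the startswith() checks in deduct_content_type().
        if mime.startswith('video/'):
            return 'video'
        if mime.startswith('image/'):
            return 'image'
        return 'file'

    assert guess_content_type('video/mp4') == 'video'
    assert guess_content_type('image/png') == 'image'
    assert guess_content_type('application/zip') == 'file'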
def nodes_deduct_content_type(nodes):
for node in nodes:
deduct_content_type(node)
def before_returning_node(node):
# Run validation process, since GET on nodes entry point is public
check_permissions('nodes', node, 'GET', append_allowed_methods=True)
# Embed short_link_info if the node has a short_code.
short_code = node.get('short_code')
if short_code:
node['short_link'] = short_link_info(short_code)['short_link']
def before_returning_nodes(nodes):
for node in nodes['_items']:
before_returning_node(node)
def node_set_default_picture(node, original=None):
"""Uses the image of an image asset or colour map of texture node as picture."""
if node.get('picture'):
log.debug('Node %s already has a picture, not overriding', node.get('_id'))
return
node_type = node.get('node_type')
props = node.get('properties', {})
content = props.get('content_type')
if node_type == 'asset' and content == 'image':
image_file_id = props.get('file')
elif node_type == 'texture':
# Find the colour map, defaulting to the first image map available.
image_file_id = None
for image in props.get('files', []):
if image_file_id is None or image.get('map_type') == 'color':
image_file_id = image.get('file')
else:
log.debug('Not setting default picture on node type %s content type %s',
node_type, content)
return
if image_file_id is None:
log.debug('Nothing to set the picture to.')
return
log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
node['picture'] = image_file_id
def nodes_set_default_picture(nodes):
for node in nodes:
node_set_default_picture(node)
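Note: the texture branch above takes the first file and then upgrades to a 'color' map if one appears later; a standalone sketch of that selection:

    def pick_picture(files: list):
        image_file_id = None
        for image in files:
            # Take the first file, then prefer the colour map once seen.
            if image_file_id is None or image.get('map_type') == 'color':
                image_file_id = image.get('file')
        return image_file_id

    files = [{'map_type': 'normal', 'file': 'f1'},
             {'map_type': 'color', 'file': 'f2'}]
    assert pick_picture(files) == 'f2'  # colour map wins over the first file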
def before_deleting_node(node: dict):
check_permissions('nodes', node, 'DELETE')
def after_deleting_node(item):
from pillar.celery import search_index_tasks as index
index.node_delete.delay(str(item['_id']))
only_for_textures = only_for_node_type_decorator('texture')
@only_for_textures
def texture_sort_files(node, original=None):
"""Sort files alphabetically by map type, with colour map first."""
try:
files = node['properties']['files']
except KeyError:
return
# Sort the map types alphabetically, ensuring 'color' comes first.
as_dict = {f['map_type']: f for f in files}
types = sorted(as_dict.keys(), key=lambda k: '\0' if k == 'color' else k)
node['properties']['files'] = [as_dict[map_type] for map_type in types]
def textures_sort_files(nodes):
for node in nodes:
texture_sort_files(node)
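Note: the sort key above relies on '\0' sorting before any printable string, which forces 'color' to the front while the rest stays alphabetical:

    types = ['normal', 'color', 'bump']
    ordered = sorted(types, key=lambda k: '\0' if k == 'color' else k)
    assert ordered == ['color', 'bump', 'normal']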
def parse_markdown(node, original=None):
import copy
projects_collection = current_app.data.driver.db['projects']
project = projects_collection.find_one({'_id': node['project']}, {'node_types': 1})
# Query node type directly using the key
node_type = next(nt for nt in project['node_types']
if nt['name'] == node['node_type'])
# Create a copy to not overwrite the actual schema.
schema = copy.deepcopy(current_app.config['DOMAIN']['nodes']['schema'])
schema['properties'] = node_type['dyn_schema']
def find_markdown_fields(schema, node):
"""Find and process all makrdown validated fields."""
for k, v in schema.items():
if not isinstance(v, dict):
continue
if v.get('validator') == 'markdown':
# If there is a match with the validator: markdown pair, assign the sibling
# property (following the naming convention _<property>_html)
# the processed value.
if k in node:
html = pillar.markdown.markdown(node[k])
field_name = pillar.markdown.cache_field_name(k)
node[field_name] = html
if isinstance(node, dict) and k in node:
find_markdown_fields(v, node[k])
find_markdown_fields(schema, node)
return 'ok'
def parse_markdowns(items):
for item in items:
parse_markdown(item)
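Note: the `_<property>_html` convention used here matches pillar.markdown.cache_field_name; a sketch of that naming, assuming the helper simply wraps the field name:

    def cache_field_name(field_name: str) -> str:
        # Assumed behaviour, per the naming convention described above.
        return f'_{field_name}_html'

    assert cache_field_name('content') == '_content_html'
    # The inverse used in the projects hook further down strips '_' and '_html':
    assert '_content_html'[1:-5] == 'content'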
def setup_app(app, url_prefix):
global _tagged
@@ -250,26 +493,26 @@ def setup_app(app, url_prefix):
from . import patch
patch.setup_app(app, url_prefix=url_prefix)
app.on_fetched_item_nodes += eve_hooks.before_returning_node
app.on_fetched_resource_nodes += eve_hooks.before_returning_nodes
app.on_replace_nodes += eve_hooks.before_replacing_node
app.on_replace_nodes += eve_hooks.texture_sort_files
app.on_replace_nodes += eve_hooks.deduct_content_type_and_duration
app.on_replace_nodes += eve_hooks.node_set_default_picture
app.on_replaced_nodes += eve_hooks.after_replacing_node
app.on_insert_nodes += eve_hooks.before_inserting_nodes
app.on_insert_nodes += eve_hooks.nodes_deduct_content_type_and_duration
app.on_insert_nodes += eve_hooks.nodes_set_default_picture
app.on_insert_nodes += eve_hooks.textures_sort_files
app.on_inserted_nodes += eve_hooks.after_inserting_nodes
app.on_update_nodes += eve_hooks.texture_sort_files
app.on_delete_item_nodes += eve_hooks.before_deleting_node
app.on_deleted_item_nodes += eve_hooks.after_deleting_node
app.on_fetched_item_nodes += before_returning_node
app.on_fetched_resource_nodes += before_returning_nodes
app.on_replace_nodes += before_replacing_node
app.on_replace_nodes += parse_markdown
app.on_replace_nodes += texture_sort_files
app.on_replace_nodes += deduct_content_type
app.on_replace_nodes += node_set_default_picture
app.on_replaced_nodes += after_replacing_node
app.on_insert_nodes += before_inserting_nodes
app.on_insert_nodes += parse_markdowns
app.on_insert_nodes += nodes_deduct_content_type
app.on_insert_nodes += nodes_set_default_picture
app.on_insert_nodes += textures_sort_files
app.on_inserted_nodes += after_inserting_nodes
app.on_update_nodes += texture_sort_files
app.on_delete_item_nodes += before_deleting_node
app.on_deleted_item_nodes += after_deleting_node
app.register_api_blueprint(blueprint, url_prefix=url_prefix)
activities.setup_app(app)
@@ -1,43 +0,0 @@
from eve.methods import get
from pillar.api.utils import gravatar
def for_node(node_id):
activities, _, _, status, _ =\
get('activities',
{
'$or': [
{'object_type': 'node',
'object': node_id},
{'context_object_type': 'node',
'context_object': node_id},
],
},)
for act in activities['_items']:
act['actor_user'] = _user_info(act['actor_user'])
return activities
def _user_info(user_id):
users, _, _, status, _ = get('users', {'_id': user_id})
if len(users['_items']) > 0:
user = users['_items'][0]
user['gravatar'] = gravatar(user['email'])
public_fields = {'full_name', 'username', 'gravatar'}
for field in list(user.keys()):
if field not in public_fields:
del user[field]
return user
return {}
def setup_app(app):
global _user_info
decorator = app.cache.memoize(timeout=300, make_name='%s.public_user_info' % __name__)
_user_info = decorator(_user_info)
@@ -1,298 +0,0 @@
import logging
from datetime import datetime
import pymongo
import typing
import bson
import attr
import werkzeug.exceptions as wz_exceptions
import pillar
from pillar import current_app, shortcodes
from pillar.api.nodes.custom.comment import patch_comment
from pillar.api.utils import jsonify, gravatar
from pillar.auth import current_user
log = logging.getLogger(__name__)
@attr.s(auto_attribs=True)
class UserDO:
id: str
full_name: str
gravatar: str
badges_html: str
@attr.s(auto_attribs=True)
class CommentPropertiesDO:
attachments: typing.Dict
rating_positive: int = 0
rating_negative: int = 0
@attr.s(auto_attribs=True)
class CommentDO:
id: bson.ObjectId
parent: bson.ObjectId
project: bson.ObjectId
user: UserDO
msg_html: str
msg_markdown: str
properties: CommentPropertiesDO
created: datetime
updated: datetime
etag: str
replies: typing.List['CommentDO'] = []
current_user_rating: typing.Optional[bool] = None
@attr.s(auto_attribs=True)
class CommentTreeDO:
node_id: bson.ObjectId
project: bson.ObjectId
nbr_of_comments: int = 0
comments: typing.List[CommentDO] = []
def _get_markdowned_html(document: dict, field_name: str) -> str:
cache_field_name = pillar.markdown.cache_field_name(field_name)
html = document.get(cache_field_name)
if html is None:
markdown_src = document.get(field_name) or ''
html = pillar.markdown.markdown(markdown_src)
return html
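Note: _get_markdowned_html() prefers the pre-rendered cache field and only falls back to rendering; a sketch with a stub renderer standing in for pillar.markdown.markdown (an assumption for the demo):

    def get_markdowned_html(document: dict, field_name: str,
                            render=lambda src: f'<p>{src}</p>') -> str:
        cached = document.get(f'_{field_name}_html')  # cache field, see above
        if cached is not None:
            return cached
        return render(document.get(field_name) or '')

    assert get_markdowned_html({'_content_html': '<p>hi</p>'}, 'content') == '<p>hi</p>'
    assert get_markdowned_html({'content': 'hi'}, 'content') == '<p>hi</p>'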
def jsonify_data_object(data_object: attr):
return jsonify(
attr.asdict(data_object,
recurse=True)
)
class CommentTreeBuilder:
def __init__(self, node_id: bson.ObjectId):
self.node_id = node_id
self.nbr_of_Comments: int = 0
def build(self) -> CommentTreeDO:
enriched_comments = self.child_comments(self.node_id,
sort={'properties.rating_positive': pymongo.DESCENDING,
'_created': pymongo.DESCENDING})
project_id = self.get_project_id()
return CommentTreeDO(
node_id=self.node_id,
project=project_id,
nbr_of_comments=self.nbr_of_Comments,
comments=enriched_comments
)
def child_comments(self, node_id: bson.ObjectId, sort: dict) -> typing.List[CommentDO]:
raw_comments = self.mongodb_comments(node_id, sort)
return [self.enrich(comment) for comment in raw_comments]
def enrich(self, mongo_comment: dict) -> CommentDO:
self.nbr_of_Comments += 1
comment = to_comment_data_object(mongo_comment)
comment.replies = self.child_comments(mongo_comment['_id'],
sort={'_created': pymongo.ASCENDING})
return comment
def get_project_id(self):
nodes_coll = current_app.db('nodes')
result = nodes_coll.find_one({'_id': self.node_id})
return result['project']
@classmethod
def mongodb_comments(cls, node_id: bson.ObjectId, sort: dict) -> typing.Iterator:
nodes_coll = current_app.db('nodes')
return nodes_coll.aggregate([
{'$match': {'node_type': 'comment',
'_deleted': {'$ne': True},
'properties.status': 'published',
'parent': node_id}},
{'$lookup': {"from": "users",
"localField": "user",
"foreignField": "_id",
"as": "user"}},
{'$unwind': {'path': "$user"}},
{'$sort': sort},
])
def get_node_comments(node_id: bson.ObjectId):
comments_tree = CommentTreeBuilder(node_id).build()
return jsonify_data_object(comments_tree)
def post_node_comment(parent_id: bson.ObjectId, markdown_msg: str, attachments: dict):
parent_node = find_node_or_raise(parent_id,
'User %s tried to update comment with bad parent_id %s',
current_user.objectid,
parent_id)
is_reply = parent_node['node_type'] == 'comment'
comment = dict(
parent=parent_id,
project=parent_node['project'],
name='Comment',
user=current_user.objectid,
node_type='comment',
properties=dict(
content=markdown_msg,
status='published',
is_reply=is_reply,
confidence=0,
rating_positive=0,
rating_negative=0,
attachments=attachments,
),
permissions=dict(
users=[dict(
user=current_user.objectid,
methods=['PUT'])
]
)
)
r, _, _, status = current_app.post_internal('nodes', comment)
if status != 201:
log.warning('Unable to post comment on %s as %s: %s',
parent_id, current_user.objectid, r)
raise wz_exceptions.InternalServerError('Unable to create comment')
comment_do = get_comment(parent_id, r['_id'])
return jsonify_data_object(comment_do), 201
def find_node_or_raise(node_id, *args):
nodes_coll = current_app.db('nodes')
node_to_comment = nodes_coll.find_one({
'_id': node_id,
'_deleted': {'$ne': True},
})
if not node_to_comment:
log.warning(args)
raise wz_exceptions.UnprocessableEntity()
return node_to_comment
def patch_node_comment(parent_id: bson.ObjectId, comment_id: bson.ObjectId, markdown_msg: str, attachments: dict):
_, _ = find_parent_and_comment_or_raise(parent_id, comment_id)
patch = dict(
op='edit',
content=markdown_msg,
attachments=attachments
)
json_result = patch_comment(comment_id, patch)
if json_result.json['result'] != 200:
raise wz_exceptions.InternalServerError('Failed to update comment')
comment_do = get_comment(parent_id, comment_id)
return jsonify_data_object(comment_do), 200
def find_parent_and_comment_or_raise(parent_id, comment_id):
parent = find_node_or_raise(parent_id,
'User %s tried to update comment with bad parent_id %s',
current_user.objectid,
parent_id)
comment = find_node_or_raise(comment_id,
'User %s tried to update comment with bad id %s',
current_user.objectid,
comment_id)
validate_comment_parent_relation(comment, parent)
return parent, comment
def validate_comment_parent_relation(comment, parent):
if comment['parent'] != parent['_id']:
log.warning('User %s tried to update comment with bad parent/comment pair. parent_id: %s comment_id: %s',
current_user.objectid,
parent['_id'],
comment['_id'])
raise wz_exceptions.BadRequest()
def get_comment(parent_id: bson.ObjectId, comment_id: bson.ObjectId) -> CommentDO:
nodes_coll = current_app.db('nodes')
mongo_comment = list(nodes_coll.aggregate([
{'$match': {'node_type': 'comment',
'_deleted': {'$ne': True},
'properties.status': 'published',
'parent': parent_id,
'_id': comment_id}},
{'$lookup': {"from": "users",
"localField": "user",
"foreignField": "_id",
"as": "user"}},
{'$unwind': {'path': "$user"}},
]))[0]
return to_comment_data_object(mongo_comment)
def to_comment_data_object(mongo_comment: dict) -> CommentDO:
def current_user_rating():
if current_user.is_authenticated:
for rating in mongo_comment['properties'].get('ratings', ()):
if str(rating['user']) != current_user.objectid:
continue
return rating['is_positive']
return None
user_dict = mongo_comment['user']
user = UserDO(
id=str(mongo_comment['user']['_id']),
full_name=user_dict['full_name'],
gravatar=gravatar(user_dict['email']),
badges_html=user_dict.get('badges', {}).get('html', '')
)
html = _get_markdowned_html(mongo_comment['properties'], 'content')
html = shortcodes.render_commented(html, context=mongo_comment['properties'])
return CommentDO(
id=mongo_comment['_id'],
parent=mongo_comment['parent'],
project=mongo_comment['project'],
user=user,
msg_html=html,
msg_markdown=mongo_comment['properties']['content'],
current_user_rating=current_user_rating(),
created=mongo_comment['_created'],
updated=mongo_comment['_updated'],
etag=mongo_comment['_etag'],
properties=CommentPropertiesDO(
attachments=mongo_comment['properties'].get('attachments', {}),
rating_positive=mongo_comment['properties']['rating_positive'],
rating_negative=mongo_comment['properties']['rating_negative']
)
)
def post_node_comment_vote(parent_id: bson.ObjectId, comment_id: bson.ObjectId, vote: int):
normalized_vote = min(max(vote, -1), 1)
_, _ = find_parent_and_comment_or_raise(parent_id, comment_id)
actions = {
1: 'upvote',
0: 'revoke',
-1: 'downvote',
}
patch = dict(
op=actions[normalized_vote]
)
json_result = patch_comment(comment_id, patch)
if json_result.json['_status'] != 'OK':
raise wz_exceptions.InternalServerError('Failed to vote on comment')
comment_do = get_comment(parent_id, comment_id)
return jsonify_data_object(comment_do), 200
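Note: min(max(vote, -1), 1) clamps arbitrary client input onto the three supported patch operations:

    def vote_action(vote: int) -> str:
        # Mirrors the normalisation and action table in post_node_comment_vote().
        actions = {1: 'upvote', 0: 'revoke', -1: 'downvote'}
        return actions[min(max(vote, -1), 1)]

    assert vote_action(5) == 'upvote'
    assert vote_action(0) == 'revoke'
    assert vote_action(-42) == 'downvote'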
@@ -5,7 +5,7 @@ import logging
from flask import current_app
import werkzeug.exceptions as wz_exceptions
from pillar.api.utils import authorization, authentication, jsonify, remove_private_keys
from pillar.api.utils import authorization, authentication, jsonify
from . import register_patch_handler
@@ -135,7 +135,10 @@ def edit_comment(user_id, node_id, patch):
# we can pass this stuff to Eve's patch_internal; that way the validation &
# authorisation system has enough info to work.
nodes_coll = current_app.data.driver.db['nodes']
node = nodes_coll.find_one(node_id)
projection = {'user': 1,
'project': 1,
'node_type': 1}
node = nodes_coll.find_one(node_id, projection=projection)
if node is None:
log.warning('User %s wanted to patch non-existing node %s' % (user_id, node_id))
raise wz_exceptions.NotFound('Node %s not found' % node_id)
@@ -143,12 +146,12 @@ def edit_comment(user_id, node_id, patch):
if node['user'] != user_id and not authorization.user_has_role('admin'):
raise wz_exceptions.Forbidden('You can only edit your own comments.')
node = remove_private_keys(node)
node['properties']['content'] = patch['content']
node['properties']['attachments'] = patch.get('attachments', {})
# Use Eve to PUT this node, as that also updates the etag and we want to replace attachments.
r, _, _, status = current_app.put_internal('nodes',
node,
# Use Eve to PATCH this node, as that also updates the etag.
r, _, _, status = current_app.patch_internal('nodes',
{'properties.content': patch['content'],
'project': node['project'],
'user': node['user'],
'node_type': node['node_type']},
concurrency_check=False,
_id=node_id)
if status != 200:
@@ -1,352 +0,0 @@
import collections
import functools
import logging
import urllib.parse
from bson import ObjectId
from werkzeug import exceptions as wz_exceptions
from pillar import current_app
from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.file_storage_backends.gcs import update_file_name
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
from pillar.api.utils import random_etag
from pillar.api.utils.authorization import check_permissions
log = logging.getLogger(__name__)
def before_returning_node(node):
# Run validation process, since GET on nodes entry point is public
check_permissions('nodes', node, 'GET', append_allowed_methods=True)
# Embed short_link_info if the node has a short_code.
short_code = node.get('short_code')
if short_code:
node['short_link'] = short_link_info(short_code)['short_link']
def before_returning_nodes(nodes):
for node in nodes['_items']:
before_returning_node(node)
def only_for_node_type_decorator(*required_node_type_names):
"""Returns a decorator that checks its first argument's node type.
If the node type is not of the required node type, returns None,
otherwise calls the wrapped function.
>>> deco = only_for_node_type_decorator('comment')
>>> @deco
... def handle_comment(node): pass
>>> deco = only_for_node_type_decorator('comment', 'post')
>>> @deco
... def handle_comment_or_post(node): pass
"""
# Convert to a set for efficient 'x in required_node_type_names' queries.
required_node_type_names = set(required_node_type_names)
def only_for_node_type(wrapped):
@functools.wraps(wrapped)
def wrapper(node, *args, **kwargs):
if node.get('node_type') not in required_node_type_names:
return
return wrapped(node, *args, **kwargs)
return wrapper
only_for_node_type.__doc__ = "Decorator, immediately returns when " \
"the first argument is not of type %s." % required_node_type_names
return only_for_node_type
def before_replacing_node(item, original):
check_permissions('nodes', original, 'PUT')
update_file_name(item)
# XXX Dillo specific feature (for Graphicall)
if 'download' in original['properties']:
# Check if the file referenced in the download property was updated.
# If so, mark the old file as deleted. A cronjob will take care of
# removing the actual file based on the _delete status of file docs.
original_file_id = original['properties']['download']
new_file_id = item['properties']['download']
if original_file_id == new_file_id:
return
# Mark the original file as _deleted
files = current_app.data.driver.db['files']
files.update_one({'_id': original_file_id}, {'$set': {'_deleted': True}})
log.info('Marking file %s as _deleted' % original_file_id)
def after_replacing_node(item, original):
"""Push an update to the Algolia index when a node item is updated. If the
project is private, prevent public indexing.
"""
from pillar.celery import search_index_tasks as index
projects_collection = current_app.data.driver.db['projects']
project = projects_collection.find_one({'_id': item['project']})
if project.get('is_private', False):
# Skip index updating and return
return
status = item['properties'].get('status', 'unpublished')
node_id = str(item['_id'])
if status == 'published':
index.node_save.delay(node_id)
else:
index.node_delete.delay(node_id)
def before_inserting_nodes(items):
"""Before inserting a node in the collection we check if the user is allowed
and we append the project id to it.
"""
from pillar.auth import current_user
nodes_collection = current_app.data.driver.db['nodes']
def find_parent_project(node):
"""Recursive function that finds the ultimate parent of a node."""
if node and 'parent' in node:
parent = nodes_collection.find_one({'_id': node['parent']})
return find_parent_project(parent)
if node:
return node
else:
return None
for item in items:
check_permissions('nodes', item, 'POST')
if 'parent' in item and 'project' not in item:
parent = nodes_collection.find_one({'_id': item['parent']})
project = find_parent_project(parent)
if project:
item['project'] = project['_id']
# Default the 'user' property to the current user.
item.setdefault('user', current_user.user_id)
def get_comment_verb_and_context_object_id(comment):
nodes_collection = current_app.data.driver.db['nodes']
verb = 'commented'
parent = nodes_collection.find_one({'_id': comment['parent']})
context_object_id = comment['parent']
while parent['node_type'] == 'comment':
# If the parent is a comment, we provide its own parent as
# context. We do this in order to point the user to an asset
# or group when viewing the notification.
verb = 'replied'
context_object_id = parent['parent']
parent = nodes_collection.find_one({'_id': parent['parent']})
return verb, context_object_id
def after_inserting_nodes(items):
for item in items:
context_object_id = None
# TODO: support should be added for mixed context
if item['node_type'] in PILLAR_NAMED_NODE_TYPES:
activity_subscribe(item['user'], 'node', item['_id'])
verb = 'posted'
context_object_id = item.get('parent')
if item['node_type'] == 'comment':
# Always subscribe to the parent node
activity_subscribe(item['user'], 'node', item['parent'])
verb, context_object_id = get_comment_verb_and_context_object_id(item)
# Subscribe to the parent of the parent comment (post or group)
activity_subscribe(item['user'], 'node', context_object_id)
if context_object_id and item['node_type'] in PILLAR_NAMED_NODE_TYPES:
# * Skip activity for first level items (since the context is not a
# node, but a project).
# * Don't automatically create activities for non-Pillar node types,
# as we don't know what would be a suitable verb (among other things).
activity_object_add(
item['user'],
verb,
'node',
item['_id'],
'node',
context_object_id
)
def deduct_content_type_and_duration(node_doc, original=None):
"""Deduct the content type from the attached file, if any."""
if node_doc['node_type'] != 'asset':
log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
return
node_id = node_doc.get('_id')
try:
file_id = ObjectId(node_doc['properties']['file'])
except KeyError:
if node_id is None:
# Creation of a file-less node is allowed, but updates aren't.
return
log.warning('deduct_content_type: Asset without properties.file, rejecting.')
raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')
files = current_app.data.driver.db['files']
file_doc = files.find_one({'_id': file_id},
{'content_type': 1,
'variations': 1})
if not file_doc:
log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
node_id, file_id)
raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')
# Guess the node content type from the file content type
file_type = file_doc['content_type']
if file_type.startswith('video/'):
content_type = 'video'
elif file_type.startswith('image/'):
content_type = 'image'
else:
content_type = 'file'
node_doc['properties']['content_type'] = content_type
if content_type == 'video':
duration = file_doc['variations'][0].get('duration')
if duration:
node_doc['properties']['duration_seconds'] = duration
else:
log.warning('Video file %s has no duration', file_id)
def nodes_deduct_content_type_and_duration(nodes):
for node in nodes:
deduct_content_type_and_duration(node)
def node_set_default_picture(node, original=None):
"""Uses the image of an image asset or colour map of texture node as picture."""
if node.get('picture'):
log.debug('Node %s already has a picture, not overriding', node.get('_id'))
return
node_type = node.get('node_type')
props = node.get('properties', {})
content = props.get('content_type')
if node_type == 'asset' and content == 'image':
image_file_id = props.get('file')
elif node_type == 'texture':
# Find the colour map, defaulting to the first image map available.
image_file_id = None
for image in props.get('files', []):
if image_file_id is None or image.get('map_type') == 'color':
image_file_id = image.get('file')
else:
log.debug('Not setting default picture on node type %s content type %s',
node_type, content)
return
if image_file_id is None:
log.debug('Nothing to set the picture to.')
return
log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
node['picture'] = image_file_id
def nodes_set_default_picture(nodes):
for node in nodes:
node_set_default_picture(node)
def before_deleting_node(node: dict):
check_permissions('nodes', node, 'DELETE')
remove_project_references(node)
def remove_project_references(node):
project_id = node.get('project')
if not project_id:
return
node_id = node['_id']
log.info('Removing references to node %s from project %s', node_id, project_id)
projects_col = current_app.db('projects')
project = projects_col.find_one({'_id': project_id})
updates = collections.defaultdict(dict)
if project.get('header_node') == node_id:
updates['$unset']['header_node'] = node_id
project_reference_lists = ('nodes_blog', 'nodes_featured', 'nodes_latest')
for list_name in project_reference_lists:
references = project.get(list_name)
if not references:
continue
try:
references.remove(node_id)
except ValueError:
continue
updates['$set'][list_name] = references
if not updates:
return
updates['$set']['_etag'] = random_etag()
result = projects_col.update_one({'_id': project_id}, updates)
if result.modified_count != 1:
log.warning('Removing references to node %s from project %s resulted in %d modified documents (expected 1)',
node_id, project_id, result.modified_count)
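Note: the defaultdict above accumulates one MongoDB update document mixing $unset and $set; roughly what gets built for a node that is both the project header and in a reference list (example values are invented):

    import collections

    node_id, refs = 'NODE', ['OTHER', 'NODE']
    updates = collections.defaultdict(dict)
    updates['$unset']['header_node'] = node_id   # drop the header reference
    refs.remove(node_id)
    updates['$set']['nodes_featured'] = refs     # rewrite the list without it
    # A fresh _etag is also $set before update_one(), as in the hook above.
    assert dict(updates) == {'$unset': {'header_node': 'NODE'},
                             '$set': {'nodes_featured': ['OTHER']}}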
def after_deleting_node(item):
from pillar.celery import search_index_tasks as index
index.node_delete.delay(str(item['_id']))
only_for_textures = only_for_node_type_decorator('texture')
@only_for_textures
def texture_sort_files(node, original=None):
"""Sort files alphabetically by map type, with colour map first."""
try:
files = node['properties']['files']
except KeyError:
return
# Sort the map types alphabetically, ensuring 'color' comes first.
as_dict = {f['map_type']: f for f in files}
types = sorted(as_dict.keys(), key=lambda k: '\0' if k == 'color' else k)
node['properties']['files'] = [as_dict[map_type] for map_type in types]
def textures_sort_files(nodes):
for node in nodes:
texture_sort_files(node)
def short_link_info(short_code):
"""Returns the short link info in a dict."""
short_link = urllib.parse.urljoin(
current_app.config['SHORT_LINK_BASE_URL'], short_code)
return {
'short_code': short_code,
'short_link': short_link,
}
@@ -9,7 +9,6 @@ def setup_app(app, api_prefix):
app.on_replace_projects += hooks.override_is_private_field
app.on_replace_projects += hooks.before_edit_check_permissions
app.on_replace_projects += hooks.protect_sensitive_fields
app.on_replace_projects += hooks.parse_markdown
app.on_update_projects += hooks.override_is_private_field
app.on_update_projects += hooks.before_edit_check_permissions
@@ -20,8 +19,6 @@ def setup_app(app, api_prefix):
app.on_insert_projects += hooks.before_inserting_override_is_private_field
app.on_insert_projects += hooks.before_inserting_projects
app.on_insert_projects += hooks.parse_markdowns
app.on_inserted_projects += hooks.after_inserting_projects
app.on_fetched_item_projects += hooks.before_returning_project_permissions
@@ -3,7 +3,6 @@ import logging
from flask import request, abort
import pillar
from pillar import current_app
from pillar.api.node_types.asset import node_type_asset
from pillar.api.node_types.comment import node_type_comment
@@ -247,37 +246,3 @@ def project_node_type_has_method(response):
def projects_node_type_has_method(response):
for project in response['_items']:
project_node_type_has_method(project)
def parse_markdown(project, original=None):
schema = current_app.config['DOMAIN']['projects']['schema']
def find_markdown_fields(schema, project):
"""Find and process all Markdown coerced fields.
- look for fields with a 'coerce': 'markdown' property
- parse the name of the field and generate the sibling field name (_<field_name>_html -> <field_name>)
- parse the content of the <field_name> field as markdown and save it in _<field_name>_html
"""
for field_name, field_value in schema.items():
if not isinstance(field_value, dict):
continue
if field_value.get('coerce') != 'markdown':
continue
if field_name not in project:
continue
# Construct markdown source field name (strip the leading '_' and the trailing '_html')
source_field_name = field_name[1:-5]
html = pillar.markdown.markdown(project[source_field_name])
project[field_name] = html
if isinstance(project, dict) and field_name in project:
find_markdown_fields(field_value, project[field_name])
find_markdown_fields(schema, project)
def parse_markdowns(items):
for item in items:
parse_markdown(item)
@@ -7,7 +7,6 @@ from werkzeug.exceptions import abort
from pillar import current_app
from pillar.auth import current_user
from pillar.api import file_storage_backends
log = logging.getLogger(__name__)
@@ -156,18 +155,6 @@ def project_id(project_url: str) -> ObjectId:
return proj['_id']
def get_project_url(project_id: ObjectId) -> str:
"""Returns the project URL, or raises a ValueError when not found."""
proj_coll = current_app.db('projects')
proj = proj_coll.find_one({'_id': project_id, '_deleted': {'$ne': True}},
projection={'url': True})
if not proj:
raise ValueError(f'project with id={project_id} not found')
return proj['url']
def get_project(project_url: str) -> dict:
"""Find a project in the database, raises ValueError if not found.
@@ -200,14 +187,3 @@ def put_project(project: dict):
if status_code != 200:
raise ValueError(f"Can't update project {pid}, "
f"status {status_code} with issues: {result}")
def storage(project_id: ObjectId) -> file_storage_backends.Bucket:
"""Return the storage bucket for this project.
For now this returns a bucket in the default storage backend, since
individual projects do not have a 'storage backend' setting (this is
set per file, not per project).
"""
return file_storage_backends.default_storage_backend(str(project_id))
@@ -81,7 +81,6 @@ class Node(es.DocType):
fields={
'id': es.Keyword(),
'name': es.Keyword(),
'url': es.Keyword(),
}
)
@@ -154,21 +153,18 @@ def create_doc_from_node_data(node_to_index: dict) -> typing.Optional[Node]:
doc.objectID = str(node_to_index['objectID'])
doc.node_type = node_to_index['node_type']
doc.name = node_to_index['name']
doc.description = node_to_index.get('description')
doc.user.id = str(node_to_index['user']['_id'])
doc.user.name = node_to_index['user']['full_name']
doc.project.id = str(node_to_index['project']['_id'])
doc.project.name = node_to_index['project']['name']
doc.project.url = node_to_index['project']['url']
if node_to_index['node_type'] == 'asset':
doc.media = node_to_index['media']
doc.picture = str(node_to_index.get('picture'))
doc.picture = node_to_index.get('picture')
doc.tags = node_to_index.get('tags')
doc.license_notes = node_to_index.get('license_notes')
doc.is_free = node_to_index.get('is_free')
doc.created_at = node_to_index['created']
doc.updated_at = node_to_index['updated']
@@ -3,18 +3,16 @@ import logging
import typing
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search, Q, MultiSearch
from elasticsearch_dsl import Search, Q
from elasticsearch_dsl.query import Query
from pillar import current_app
log = logging.getLogger(__name__)
BOOLEAN_TERMS = ['is_free']
NODE_AGG_TERMS = ['node_type', 'media', 'tags', *BOOLEAN_TERMS]
NODE_AGG_TERMS = ['node_type', 'media', 'tags', 'is_free']
USER_AGG_TERMS = ['roles', ]
ITEMS_PER_PAGE = 10
USER_SOURCE_INCLUDE = ['full_name', 'objectID', 'username']
# Will be set in setup_app()
client: Elasticsearch = None
@@ -29,25 +27,26 @@ def add_aggs_to_search(search, agg_terms):
search.aggs.bucket(term, 'terms', field=term)
def make_filter(must: list, terms: dict) -> list:
def make_must(must: list, terms: dict) -> list:
""" Given term parameters append must queries to the must list """
for field, value in terms.items():
if value not in (None, ''):
must.append({'term': {field: value}})
if value:
must.append({'match': {field: value}})
return must
def nested_bool(filters: list, should: list, terms: dict, *, index_alias: str) -> Search:
def nested_bool(must: list, should: list, terms: dict, *, index_alias: str) -> Search:
"""
Create a nested bool, where the aggregation selection is a must.
:param index_alias: 'USER' or 'NODE', see ELASTIC_INDICES config.
"""
filters = make_filter(filters, terms)
must = make_must(must, terms)
bool_query = Q('bool', should=should)
bool_query = Q('bool', must=bool_query, filter=filters)
must.append(bool_query)
bool_query = Q('bool', must=must)
index = current_app.config['ELASTIC_INDICES'][index_alias]
search = Search(using=client, index=index)
@@ -56,34 +55,12 @@ def nested_bool(filters: list, should: list, terms: dict, *, index_alias: str) -> Search:
return search
def do_multi_node_search(queries: typing.List[dict]) -> typing.List[dict]:
"""
Given user query input and term refinements
search for public published nodes
"""
search = create_multi_node_search(queries)
return _execute_multi(search)
def do_node_search(query: str, terms: dict, page: int, project_id: str='') -> dict:
"""
Given user query input and term refinements
search for public published nodes
"""
search = create_node_search(query, terms, page, project_id)
return _execute(search)
def create_multi_node_search(queries: typing.List[dict]) -> MultiSearch:
search = MultiSearch(using=client)
for q in queries:
search = search.add(create_node_search(**q))
return search
def create_node_search(query: str, terms: dict, page: int, project_id: str='') -> Search:
terms = _transform_terms(terms)
should = [
Q('match', name=query),
@@ -94,30 +71,52 @@ def create_node_search(query: str, terms: dict, page: int, project_id: str='') -> Search:
Q('term', media=query),
Q('term', tags=query),
]
filters = []
must = []
if project_id:
filters.append({'term': {'project.id': project_id}})
must.append({'term': {'project.id': project_id}})
if not query:
should = []
search = nested_bool(filters, should, terms, index_alias='NODE')
search = nested_bool(must, should, terms, index_alias='NODE')
if not query:
search = search.sort('-created_at')
add_aggs_to_search(search, NODE_AGG_TERMS)
search = paginate(search, page)
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(search.to_dict(), indent=4))
return search
response = search.execute()
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(response.to_dict(), indent=4))
return response.to_dict()
def do_user_search(query: str, terms: dict, page: int) -> dict:
""" return user objects represented in elasticsearch result dict"""
search = create_user_search(query, terms, page)
return _execute(search)
must, should = _common_user_search(query)
search = nested_bool(must, should, terms, index_alias='USER')
add_aggs_to_search(search, USER_AGG_TERMS)
search = paginate(search, page)
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(search.to_dict(), indent=4))
response = search.execute()
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(response.to_dict(), indent=4))
return response.to_dict()
def _common_user_search(query: str) -> (typing.List[Query], typing.List[Query]):
"""Construct (filter,should) for regular + admin user search."""
"""Construct (must,should) for regular + admin user search."""
if not query:
return [], []
@@ -145,31 +144,8 @@ def do_user_search_admin(query: str, terms: dict, page: int) -> dict:
search all user fields and provide aggregation information
"""
search = create_user_admin_search(query, terms, page)
must, should = _common_user_search(query)
return _execute(search)
def _execute(search: Search) -> dict:
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(search.to_dict(), indent=4))
resp = search.execute()
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(resp.to_dict(), indent=4))
return resp.to_dict()
def _execute_multi(search: typing.List[Search]) -> typing.List[dict]:
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(search.to_dict(), indent=4))
resp = search.execute()
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(resp.to_dict(), indent=4))
return [r.to_dict() for r in resp]
def create_user_admin_search(query: str, terms: dict, page: int) -> Search:
terms = _transform_terms(terms)
filters, should = _common_user_search(query)
if query:
# We most likely got an id field. We should find it.
if len(query) == len('563aca02c379cf0005e8e17d'):
@@ -179,34 +155,26 @@ def create_user_admin_search(query: str, terms: dict, page: int) -> Search:
'boost': 100,  # how much more it counts for the score
}
}})
search = nested_bool(filters, should, terms, index_alias='USER')
search = nested_bool(must, should, terms, index_alias='USER')
add_aggs_to_search(search, USER_AGG_TERMS)
search = paginate(search, page)
return search
def create_user_search(query: str, terms: dict, page: int) -> Search:
search = create_user_admin_search(query, terms, page)
return search.source(include=USER_SOURCE_INCLUDE)
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(search.to_dict(), indent=4))
response = search.execute()
if log.isEnabledFor(logging.DEBUG):
log.debug(json.dumps(response.to_dict(), indent=4))
return response.to_dict()
def paginate(search: Search, page_idx: int) -> Search:
return search[page_idx * ITEMS_PER_PAGE:(page_idx + 1) * ITEMS_PER_PAGE]
def _transform_terms(terms: dict) -> dict:
"""
Ugly hack! Elastic uses 1/0 for boolean values in its aggregate response,
but expects true/false in queries.
"""
transformed = terms.copy()
for t in BOOLEAN_TERMS:
orig = transformed.get(t)
if orig in ('1', '0'):
transformed[t] = bool(int(orig))
return transformed
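Note: the hack above only rewrites the '1'/'0' strings Elastic hands back for boolean facets, leaving other terms untouched:

    BOOLEAN_TERMS = ['is_free']

    def transform_terms(terms: dict) -> dict:
        # Mirrors _transform_terms() as defined above.
        transformed = terms.copy()
        for t in BOOLEAN_TERMS:
            if transformed.get(t) in ('1', '0'):
                transformed[t] = bool(int(transformed[t]))
        return transformed

    assert transform_terms({'is_free': '1', 'media': 'video'}) == \
        {'is_free': True, 'media': 'video'}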
def setup_app(app):
global client
@@ -18,7 +18,7 @@ TERMS = [
]
def _term_filters(args) -> dict:
def _term_filters() -> dict:
"""
Check if frontend wants to filter stuff
on specific fields AKA facets
@@ -26,52 +26,35 @@ def _term_filters(args) -> dict:
return mapping with term field name
and provided user term value
"""
return {term: args.get(term, '') for term in TERMS}
return {term: request.args.get(term, '') for term in TERMS}
def _page_index(page) -> int:
def _page_index() -> int:
"""Return the page index from the query string."""
try:
page_idx = int(page)
page_idx = int(request.args.get('page') or '0')
except TypeError:
log.info('invalid page number %r received', request.args.get('page'))
raise wz_exceptions.BadRequest()
return page_idx
@blueprint_search.route('/', methods=['GET'])
@blueprint_search.route('/')
def search_nodes():
searchword = request.args.get('q', '')
project_id = request.args.get('project', '')
terms = _term_filters(request.args)
terms = _term_filters()
page_idx = _page_index(request.args.get('page', 0))
page_idx = _page_index()
result = queries.do_node_search(searchword, terms, page_idx, project_id)
return jsonify(result)
@blueprint_search.route('/multisearch', methods=['POST'])
def multi_search_nodes():
if len(request.args) != 1:
log.info(f'Expected 1 argument, received {len(request.args)}')
json_obj = request.json
q = []
for row in json_obj:
q.append({
'query': row.get('q', ''),
'project_id': row.get('project', ''),
'terms': _term_filters(row),
'page': _page_index(row.get('page', 0))
})
result = queries.do_multi_node_search(q)
return jsonify(result)
@blueprint_search.route('/user')
def search_user():
searchword = request.args.get('q', '')
terms = _term_filters(request.args)
terms = _term_filters()
page_idx = _page_index(request.args.get('page', 0))
page_idx = _page_index()
# result is the raw elasticsearch output.
# we need to filter fields in case of user objects.
@@ -82,6 +65,27 @@ def search_user():
resp.status_code = 500
return resp
# filter sensitive stuff
# we only need objectID, full_name, username
hits = result.get('hits', {})
new_hits = []
for hit in hits.get('hits'):
source = hit['_source']
single_hit = {
'_source': {
'objectID': source.get('objectID'),
'username': source.get('username'),
'full_name': source.get('full_name'),
}
}
new_hits.append(single_hit)
# replace search result with safe subset
result['hits']['hits'] = new_hits
return jsonify(result)
@@ -93,8 +97,8 @@ def search_user_admin():
"""
searchword = request.args.get('q', '')
terms = _term_filters(request.args)
terms = _term_filters()
page_idx = _page_index(_page_index(request.args.get('page', 0)))
page_idx = _page_index()
try:
result = queries.do_user_search_admin(searchword, terms, page_idx)
@@ -1,374 +0,0 @@
import itertools
import typing
from datetime import datetime
from operator import itemgetter
import attr
import bson
import pymongo
from flask import Blueprint, current_app, request, url_for
import pillar
from pillar import shortcodes
from pillar.api.utils import jsonify, pretty_duration, str2id
blueprint = Blueprint('timeline', __name__)
@attr.s(auto_attribs=True)
class TimelineDO:
groups: typing.List['GroupDO'] = []
continue_from: typing.Optional[float] = None
@attr.s(auto_attribs=True)
class GroupDO:
label: typing.Optional[str] = None
url: typing.Optional[str] = None
items: typing.Dict = {}
groups: typing.Iterable['GroupDO'] = []
class SearchHelper:
def __init__(self, nbr_of_weeks: int, continue_from: typing.Optional[datetime],
project_ids: typing.List[bson.ObjectId], sort_direction: str):
self._nbr_of_weeks = nbr_of_weeks
self._continue_from = continue_from
self._project_ids = project_ids
self.sort_direction = sort_direction
def _match(self, continue_from: typing.Optional[datetime]) -> dict:
created = {}
if continue_from:
if self.sort_direction == 'desc':
created = {'_created': {'$lt': continue_from}}
else:
created = {'_created': {'$gt': continue_from}}
return {'_deleted': {'$ne': True},
'node_type': {'$in': ['asset', 'post']},
'properties.status': {'$eq': 'published'},
'project': {'$in': self._project_ids},
**created,
}
def raw_weeks_from_mongo(self) -> pymongo.collection.Collection:
direction = pymongo.DESCENDING if self.sort_direction == 'desc' else pymongo.ASCENDING
nodes_coll = current_app.db('nodes')
return nodes_coll.aggregate([
{'$match': self._match(self._continue_from)},
{'$lookup': {"from": "projects",
"localField": "project",
"foreignField": "_id",
"as": "project"}},
{'$unwind': {'path': "$project"}},
{'$lookup': {"from": "users",
"localField": "user",
"foreignField": "_id",
"as": "user"}},
{'$unwind': {'path': "$user"}},
{'$project': {
'_created': 1,
'project._id': 1,
'project.url': 1,
'project.name': 1,
'user._id': 1,
'user.full_name': 1,
'name': 1,
'node_type': 1,
'picture': 1,
'properties': 1,
'permissions': 1,
}},
{'$group': {
'_id': {'year': {'$isoWeekYear': '$_created'},
'week': {'$isoWeek': '$_created'}},
'nodes': {'$push': '$$ROOT'}
}},
{'$sort': {'_id.year': direction,
'_id.week': direction}},
{'$limit': self._nbr_of_weeks}
])
def has_more(self, continue_from: datetime) -> bool:
nodes_coll = current_app.db('nodes')
result = nodes_coll.count(self._match(continue_from))
return bool(result)
class Grouper:
@classmethod
def label(cls, node):
return None
@classmethod
def url(cls, node):
return None
@classmethod
def group_key(cls) -> typing.Callable[[dict], typing.Any]:
raise NotImplemented()
@classmethod
def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
raise NotImplemented()
class ProjectGrouper(Grouper):
@classmethod
def label(cls, project: dict):
return project['name']
@classmethod
def url(cls, project: dict):
return url_for('projects.view', project_url=project['url'])
@classmethod
def group_key(cls) -> typing.Callable[[dict], typing.Any]:
return itemgetter('project')
@classmethod
def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
return lambda node: node['project']['_id']
class UserGrouper(Grouper):
@classmethod
def label(cls, user):
return user['full_name']
@classmethod
def group_key(cls) -> typing.Callable[[dict], typing.Any]:
return itemgetter('user')
@classmethod
def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
return lambda node: node['user']['_id']
class TimeLineBuilder:
def __init__(self, search_helper: SearchHelper, grouper: typing.Type[Grouper]):
self.search_helper = search_helper
self.grouper = grouper
self.continue_from = None
def build(self) -> TimelineDO:
raw_weeks = self.search_helper.raw_weeks_from_mongo()
clean_weeks = (self.create_week_group(week) for week in raw_weeks)
return TimelineDO(
groups=list(clean_weeks),
continue_from=self.continue_from.timestamp() if self.search_helper.has_more(self.continue_from) else None
)
def create_week_group(self, week: dict) -> GroupDO:
nodes = week['nodes']
nodes.sort(key=itemgetter('_created'), reverse=True)
self.update_continue_from(nodes)
groups = self.create_groups(nodes)
return GroupDO(
label=f'Week {week["_id"]["week"]}, {week["_id"]["year"]}',
groups=groups
)
def create_groups(self, nodes: typing.List[dict]) -> typing.List[GroupDO]:
self.sort_nodes(nodes) # groupby assumes that the list is sorted
nodes_grouped = itertools.groupby(nodes, self.grouper.group_key())
groups = (self.clean_group(grouped_by, group) for grouped_by, group in nodes_grouped)
groups_sorted = sorted(groups, key=self.group_row_sorter, reverse=True)
return groups_sorted
def sort_nodes(self, nodes: typing.List[dict]):
nodes.sort(key=itemgetter('node_type'))
nodes.sort(key=self.grouper.sort_key())
def update_continue_from(self, sorted_nodes: typing.List[dict]):
if self.search_helper.sort_direction == 'desc':
first_created = sorted_nodes[-1]['_created']
candidate = self.continue_from or first_created
self.continue_from = min(candidate, first_created)
else:
last_created = sorted_nodes[0]['_created']
candidate = self.continue_from or last_created
self.continue_from = max(candidate, last_created)
def clean_group(self, grouped_by: typing.Any, group: typing.Iterable[dict]) -> GroupDO:
items = self.create_items(group)
return GroupDO(
label=self.grouper.label(grouped_by),
url=self.grouper.url(grouped_by),
items=items
)
def create_items(self, group) -> typing.List[dict]:
by_node_type = itertools.groupby(group, key=itemgetter('node_type'))
items = {}
for node_type, nodes in by_node_type:
items[node_type] = [self.node_prettyfy(n) for n in nodes]
return items
@classmethod
def node_prettyfy(cls, node: dict)-> dict:
duration_seconds = node['properties'].get('duration_seconds')
if duration_seconds is not None:
node['properties']['duration'] = pretty_duration(duration_seconds)
if node['node_type'] == 'post':
html = _get_markdowned_html(node['properties'], 'content')
html = shortcodes.render_commented(html, context=node['properties'])
node['properties']['pretty_content'] = html
return node
@classmethod
def group_row_sorter(cls, row: GroupDO) -> typing.Tuple[datetime, datetime]:
'''
Groups that contain posts are more interesting and are therefore put higher up.
:param row:
:return: tuple with newest post date and newest asset date
'''
def newest_created(nodes: typing.List[dict]) -> datetime:
if nodes:
return nodes[0]['_created']
return datetime.fromtimestamp(0, tz=bson.tz_util.utc)
newest_post_date = newest_created(row.items.get('post'))
newest_asset_date = newest_created(row.items.get('asset'))
return newest_post_date, newest_asset_date
def _public_project_ids() -> typing.List[bson.ObjectId]:
"""Returns a list of ObjectIDs of public projects.
Memoized in setup_app().
"""
proj_coll = current_app.db('projects')
result = proj_coll.find({'is_private': False}, {'_id': 1})
return [p['_id'] for p in result]
def _get_markdowned_html(document: dict, field_name: str) -> str:
cache_field_name = pillar.markdown.cache_field_name(field_name)
html = document.get(cache_field_name)
if html is None:
markdown_src = document.get(field_name) or ''
html = pillar.markdown.markdown(markdown_src)
return html
@blueprint.route('/', methods=['GET'])
def global_timeline():
continue_from_str = request.args.get('from')
continue_from = parse_continue_from(continue_from_str)
nbr_of_weeks_str = request.args.get('weeksToLoad')
nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
sort_direction = request.args.get('dir', 'desc')
return _global_timeline(continue_from, nbr_of_weeks, sort_direction)
@blueprint.route('/p/<string(length=24):pid_path>', methods=['GET'])
def project_timeline(pid_path: str):
continue_from_str = request.args.get('from')
continue_from = parse_continue_from(continue_from_str)
nbr_of_weeks_str = request.args.get('weeksToLoad')
nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
sort_direction = request.args.get('dir', 'desc')
pid = str2id(pid_path)
return _project_timeline(continue_from, nbr_of_weeks, sort_direction, pid)
def parse_continue_from(from_arg) -> typing.Optional[datetime]:
try:
from_float = float(from_arg)
except (TypeError, ValueError):
return None
return datetime.fromtimestamp(from_float, tz=bson.tz_util.utc)
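Note: continue_from round-trips as a POSIX timestamp between client and server; a sketch (timezone.utc stands in for bson.tz_util.utc):

    from datetime import datetime, timezone

    ts = 1536883200.0  # arbitrary example value
    dt = datetime.fromtimestamp(ts, tz=timezone.utc)
    assert dt.timestamp() == ts  # what TimeLineBuilder later returns as continue_from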
def parse_nbr_of_weeks(weeks_to_load: str) -> int:
try:
return int(weeks_to_load)
except (TypeError, ValueError):
return 3
def _global_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction: str):
"""Returns an aggregated view of what has happened on the site
Memoized in setup_app().
:param continue_from: Python utc timestamp where to begin aggregation
:param nbr_of_weeks: Number of weeks to return
Example output:
{
groups: [{
label: 'Week 32',
groups: [{
label: 'Spring',
url: '/p/spring',
items:{
post: [blogPostDoc, blogPostDoc],
asset: [assetDoc, assetDoc]
},
groups: ...
}]
}],
continue_from: 123456.2 // python timestamp
}
"""
builder = TimeLineBuilder(
SearchHelper(nbr_of_weeks, continue_from, _public_project_ids(), sort_direction),
ProjectGrouper
)
return jsonify_timeline(builder.build())
def jsonify_timeline(timeline: TimelineDO):
return jsonify(
attr.asdict(timeline,
recurse=True,
filter=lambda att, value: value is not None)
)
def _project_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction, pid: bson.ObjectId):
"""Returns an aggregated view of what has happened on the site
Memoized in setup_app().
:param continue_from: Python utc timestamp where to begin aggregation
:param nbr_of_weeks: Number of weeks to return
Example output:
{
groups: [{
label: 'Week 32',
groups: [{
label: 'Tobias Johansson',
items:{
post: [blogPostDoc, blogPostDoc],
asset: [assetDoc, assetDoc]
},
groups: ...
}]
}],
continue_from: 123456.2 // python timestamp
}
"""
builder = TimeLineBuilder(
SearchHelper(nbr_of_weeks, continue_from, [pid], sort_direction),
UserGrouper
)
return jsonify_timeline(builder.build())
def setup_app(app, url_prefix):
global _public_project_ids
global _global_timeline
global _project_timeline
app.register_api_blueprint(blueprint, url_prefix=url_prefix)
cached = app.cache.cached(timeout=3600)
_public_project_ids = cached(_public_project_ids)
memoize = app.cache.memoize(timeout=60)
_global_timeline = memoize(_global_timeline)
_project_timeline = memoize(_project_timeline)
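For readers unfamiliar with the pattern: `setup_app()` rebinds the module-level functions to cached wrappers because the cache only exists once the app does. A decorator-form sketch of the equivalent (assuming `app.cache` is the Flask-Caching instance used above):
    # Hypothetical equivalent, for illustration only:
    @app.cache.cached(timeout=3600)   # single cached value, no arguments
    def _public_project_ids(): ...

    @app.cache.memoize(timeout=60)    # cached per (continue_from, nbr_of_weeks, sort_direction)
    def _global_timeline(continue_from, nbr_of_weeks, sort_direction): ...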

View File

@@ -44,16 +44,10 @@ def remove_private_keys(document):
"""Removes any key that starts with an underscore, returns result as new
dictionary.
"""
def do_remove(doc):
for key in list(doc.keys()):
if key.startswith('_'):
del doc[key]
elif isinstance(doc[key], dict):
doc[key] = do_remove(doc[key])
return doc
doc_copy = copy.deepcopy(document)
do_remove(doc_copy)
for key in list(doc_copy.keys()):
if key.startswith('_'):
del doc_copy[key]
try:
del doc_copy['allowed_methods']
@@ -63,39 +57,6 @@ def remove_private_keys(document):
return doc_copy
def pretty_duration(seconds: typing.Union[None, int, float]):
if seconds is None:
return ''
seconds = round(seconds)
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
if hours > 0:
return f'{hours:02}:{minutes:02}:{seconds:02}'
else:
return f'{minutes:02}:{seconds:02}'
def pretty_duration_fractional(seconds: typing.Union[None, int, float]):
if seconds is None:
return ''
# Remove fraction of seconds from the seconds so that the rest is done as integers.
seconds, fracs = divmod(seconds, 1)
hours, seconds = divmod(int(seconds), 3600)
minutes, seconds = divmod(seconds, 60)
msec = int(round(fracs * 1000))
if msec == 0:
msec_str = ''
else:
msec_str = f'.{msec:03}'
if hours > 0:
return f'{hours:02}:{minutes:02}:{seconds:02}{msec_str}'
else:
return f'{minutes:02}:{seconds:02}{msec_str}'
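Example output of the two helpers, derived from the code above:
    pretty_duration(59)                 # -> '00:59'
    pretty_duration(3661)               # -> '01:01:01'
    pretty_duration(None)               # -> ''
    pretty_duration_fractional(61.25)   # -> '01:01.250'
    pretty_duration_fractional(61)      # -> '01:01' (whole seconds: no msec suffix)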
class PillarJSONEncoder(json.JSONEncoder):
"""JSON encoder with support for Pillar resources."""
@@ -103,9 +64,6 @@ class PillarJSONEncoder(json.JSONEncoder):
if isinstance(obj, datetime.datetime):
return obj.strftime(RFC1123_DATE_FORMAT)
if isinstance(obj, datetime.timedelta):
return pretty_duration(obj.total_seconds())
if isinstance(obj, bson.ObjectId):
return str(obj)
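A usage sketch for the encoder (note: the timedelta branch exists only on the side of this diff that keeps pretty_duration):
    import datetime
    import json
    import bson

    doc = {'_id': bson.ObjectId(), 'duration': datetime.timedelta(seconds=90)}
    json.dumps(doc, cls=PillarJSONEncoder)
    # -> '{"_id": "<24-char hex id>", "duration": "01:30"}'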
@@ -223,8 +181,7 @@ def doc_diff(doc1, doc2, *, falsey_is_equal=True, superkey: str = None):
function won't report differences between DoesNotExist, False, '', and 0.
"""
def is_private(key):
private_keys = {'_id', '_etag', '_deleted', '_updated', '_created'}
return str(key).startswith('_')
def combine_key(some_key):
"""Combine this key with the superkey.
@@ -245,7 +202,7 @@ def doc_diff(doc1, doc2, *, falsey_is_equal=True, superkey: str = None):
if isinstance(doc1, dict) and isinstance(doc2, dict):
for key in set(doc1.keys()).union(set(doc2.keys())):
if is_private(key):
if key in private_keys:
continue
val1 = doc1.get(key, DoesNotExist)

View File

@@ -189,7 +189,7 @@ def validate_this_token(token, oauth_subclient=None):
return None
g.current_user = UserClass.construct(token, db_user)
user_authenticated.send(g.current_user)
user_authenticated.send(None)
return db_user

View File

@@ -331,9 +331,8 @@ def require_login(*, require_roles=set(),
def render_error() -> Response:
if error_view is None:
resp = Forbidden().get_response()
else:
resp = error_view()
def render_error() -> Response:
if error_view is None:
abort(403)
resp: Response = error_view()
resp.status_code = 403
return resp

View File

@@ -9,8 +9,12 @@ string = functools.partial(attr.ib, validator=attr.validators.instance_of(str))
def log(name):
"""Returns a logger
:param name: name to pass to logging.getLogger()
"""
return logging.getLogger(name)
def log(name):
"""Returns a logger attr.ib
:param name: name to pass to logging.getLogger()
:rtype: attr.ib
"""
return attr.ib(default=logging.getLogger(name),
repr=False,
hash=False,
cmp=False)

View File

@@ -12,10 +12,7 @@ from werkzeug.local import LocalProxy
from pillar import current_app
# The sender is the user that was just authenticated.
user_authenticated = blinker.Signal('Sent whenever a user was authenticated')
user_logged_in = blinker.Signal('Sent whenever a user logged in on the web')
log = logging.getLogger(__name__)
# Mapping from user role to capabilities obtained by users with that role.
@@ -228,8 +225,7 @@ def login_user_object(user: UserClass):
"""Log in the given user."""
flask_login.login_user(user, remember=True)
g.current_user = user
user_authenticated.send(user)
user_authenticated.send(None)
user_logged_in.send(user)
def logout_user():

View File

@@ -1,48 +0,0 @@
"""Support for adding CORS headers to responses."""
import functools
import flask
import werkzeug.wrappers as wz_wrappers
import werkzeug.exceptions as wz_exceptions
def allow(*, allow_credentials=False):
"""Flask endpoint decorator, adds CORS headers to the response.
If the request has a non-empty 'Origin' header, the response header
'Access-Control-Allow-Origin' is set to the value of that request header,
and some other CORS headers are set.
"""
def decorator(wrapped):
@functools.wraps(wrapped)
def wrapper(*args, **kwargs):
request_origin = flask.request.headers.get('Origin')
if not request_origin:
# No CORS headers requested, so don't bother touching the response.
return wrapped(*args, **kwargs)
try:
response = wrapped(*args, **kwargs)
except wz_exceptions.HTTPException as ex:
response = ex.get_response()
else:
if isinstance(response, tuple):
response = flask.make_response(*response)
elif isinstance(response, str):
response = flask.make_response(response)
elif isinstance(response, wz_wrappers.Response):
pass
else:
raise TypeError(f'unknown response type {type(response)}')
assert isinstance(response, wz_wrappers.Response)
response.headers.set('Access-Control-Allow-Origin', request_origin)
response.headers.set('Access-Control-Allow-Headers', 'x-requested-with')
if allow_credentials:
response.headers.set('Access-Control-Allow-Credentials', 'true')
return response
return wrapper
return decorator
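A usage sketch for the decorator (the blueprint, route, and import path are hypothetical):
    import flask
    from pillar.api.utils import cors  # module location assumed from this diff's context

    blueprint = flask.Blueprint('example', __name__)

    @blueprint.route('/status')
    @cors.allow(allow_credentials=True)
    def status():
        # Access-Control-* headers are only added when the request carries an Origin header.
        return flask.jsonify({'status': 'ok'})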

View File

@@ -7,7 +7,7 @@ from urllib.parse import urljoin
import bson
import requests
from pillar import current_app, auth
from pillar import current_app
from pillar.api.utils import utcnow
SyncUser = collections.namedtuple('SyncUser', 'user_id token bid_user_id')
@@ -23,41 +23,6 @@ class StopRefreshing(Exception):
"""
def find_user_to_sync(user_id: bson.ObjectId) -> typing.Optional[SyncUser]:
"""Return user information for syncing badges for a specific user.
Returns None if the user cannot be synced (no 'badge' scope on a token,
or no Blender ID user_id known).
"""
my_log = log.getChild('find_user_to_sync')
now = utcnow()
tokens_coll = current_app.db('tokens')
users_coll = current_app.db('users')
token_info = tokens_coll.find_one({
'user': user_id,
'token': {'$exists': True},
'oauth_scopes': 'badge',
'expire_time': {'$gt': now},
})
if not token_info:
my_log.debug('No token with scope "badge" for user %s', user_id)
return None
user_info = users_coll.find_one({'_id': user_id})
# TODO(Sybren): do this filtering in the MongoDB query:
bid_user_ids = [auth_info.get('user_id')
for auth_info in user_info.get('auth', [])
if auth_info.get('provider', '') == 'blender-id' and auth_info.get('user_id')]
if not bid_user_ids:
my_log.debug('No Blender ID user_id for user %s', user_id)
return None
bid_user_id = bid_user_ids[0]
return SyncUser(user_id=user_id, token=token_info['token'], bid_user_id=bid_user_id)
def find_users_to_sync() -> typing.Iterable[SyncUser]:
"""Return user information of syncable users with badges."""
@@ -69,7 +34,6 @@ def find_users_to_sync() -> typing.Iterable[SyncUser]:
'token': {'$exists': True},
'oauth_scopes': 'badge',
'expire_time': {'$gt': now},
# TODO(Sybren): save real token expiry time but keep checking tokens hourly when they are used!
}},
{'$lookup': {
'from': 'users',
@@ -98,6 +62,7 @@ def find_users_to_sync() -> typing.Iterable[SyncUser]:
'token': True,
'user._id': True,
'user.auth.user_id': True,
'user.badges.expires': True,
}},
])
@@ -136,7 +101,6 @@ def fetch_badge_html(session: requests.Session, user: SyncUser, size: str) \
my_log.debug('No badges for user %s', user.user_id)
return ''
if resp.status_code == 403:
# TODO(Sybren): this indicates the token is invalid, so we could just as well delete it.
my_log.warning('Tried fetching %s for user %s but received a 403: %s',
url, user.user_id, resp.text)
return ''
@@ -169,14 +133,18 @@ def refresh_all_badges(only_user_id: typing.Optional[bson.ObjectId] = None, *,
jobs to run without overlapping, even when the number of badges to refresh
becomes larger than possible within the period of the cron job.
"""
my_log = log.getChild('refresh_all_badges')
from requests.adapters import HTTPAdapter
my_log = log.getChild('fetch_badge_html')
# Test the config before we start looping over the world.
badge_expiry = badge_expiry_config()
if not badge_expiry or not isinstance(badge_expiry, datetime.timedelta):
raise ValueError('BLENDER_ID_BADGE_EXPIRY not configured properly, should be a timedelta')
session = _get_requests_session()
session = requests.Session()
session.mount('https://', HTTPAdapter(max_retries=5))
users_coll = current_app.db('users')
deadline = utcnow() + timelimit
num_updates = 0
@@ -196,71 +164,20 @@ def refresh_all_badges(only_user_id: typing.Optional[bson.ObjectId] = None, *,
user_info)
break
num_updates += 1
update_badges(user_info, badge_html, badge_expiry, dry_run=dry_run)
my_log.info('Updated badges of %d users%s', num_updates, ' (dry-run)' if dry_run else '')
def _get_requests_session() -> requests.Session:
from requests.adapters import HTTPAdapter
session = requests.Session()
session.mount('https://', HTTPAdapter(max_retries=5))
return session
def refresh_single_user(user_id: bson.ObjectId):
"""Refresh badges for a single user."""
my_log = log.getChild('refresh_single_user')
badge_expiry = badge_expiry_config()
if not badge_expiry:
my_log.warning('Skipping badge fetching, BLENDER_ID_BADGE_EXPIRY not configured')
return
my_log.debug('Fetching badges for user %s', user_id)
session = _get_requests_session()
user_info = find_user_to_sync(user_id)
if not user_info:
return
try:
badge_html = fetch_badge_html(session, user_info, 's')
except StopRefreshing:
my_log.error('Blender ID has internal problems, stopping badge refreshing at user %s',
user_info)
return
update_badges(user_info, badge_html, badge_expiry, dry_run=False)
my_log.info('Updated badges of user %s', user_id)
def update_badges(user_info: SyncUser, badge_html: str, badge_expiry: datetime.timedelta,
*, dry_run: bool):
my_log = log.getChild('update_badges')
users_coll = current_app.db('users')
update = {'badges': {
'html': badge_html,
'expires': utcnow() + badge_expiry,
}}
num_updates += 1
my_log.info('Updating badges HTML for Blender ID %s, user %s',
user_info.bid_user_id, user_info.user_id)
if not dry_run:
if dry_run:
return
result = users_coll.update_one({'_id': user_info.user_id},
{'$set': update})
if result.matched_count != 1:
my_log.warning('Unable to update badges for user %s', user_info.user_id)
my_log.info('Updated badges of %d users%s', num_updates, ' (dry-run)' if dry_run else '')
def badge_expiry_config() -> datetime.timedelta:
return current_app.config.get('BLENDER_ID_BADGE_EXPIRY')
@auth.user_logged_in.connect
def sync_badge_upon_login(sender: auth.UserClass, **kwargs):
"""Auto-sync badges when a user logs in."""
log.info('Refreshing badge of %s because they logged in', sender.user_id)
refresh_single_user(sender.user_id)

View File

@@ -0,0 +1,38 @@
import logging
from algoliasearch.helpers import AlgoliaException
log = logging.getLogger(__name__)
def push_updated_user(user_to_index: dict):
"""Push an update to the Algolia index when a user item is updated"""
from pillar.api.utils.algolia import index_user_save
try:
index_user_save(user_to_index)
except AlgoliaException as ex:
log.warning(
'Unable to push user info to Algolia for user "%s", id=%s; %s', # noqa
user_to_index.get('username'),
user_to_index.get('objectID'), ex)
def index_node_save(node_to_index: dict):
from pillar.api.utils import algolia
try:
algolia.index_node_save(node_to_index)
except AlgoliaException as ex:
log.warning(
'Unable to push node info to Algolia for node %s; %s', node_to_index, ex) # noqa
def index_node_delete(delete_id: str):
from pillar.api.utils import algolia
try:
algolia.index_node_delete(delete_id)
except AlgoliaException as ex:
log.warning('Unable to delete node info from Algolia for node %s; %s', delete_id, ex)  # noqa

View File

@@ -1,6 +1,4 @@
import logging
import bleach
from bson import ObjectId
from pillar import current_app
@@ -12,7 +10,7 @@ from pillar.api.search import algolia_indexing
log = logging.getLogger(__name__)
INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri', 'post'}
INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}
SEARCH_BACKENDS = {
@@ -30,6 +28,34 @@ def _get_node_from_id(node_id: str):
return node
def _handle_picture(node: dict, to_index: dict):
"""Add picture URL in-place to the to-be-indexed node."""
picture_id = node.get('picture')
if not picture_id:
return
files_collection = current_app.data.driver.db['files']
lookup = {'_id': ObjectId(picture_id)}
picture = files_collection.find_one(lookup)
for item in picture.get('variations', []):
if item['size'] != 't':
continue
# Not all files have a project...
pid = picture.get('project')
if pid:
link = generate_link(picture['backend'],
item['file_path'],
str(pid),
is_public=True)
else:
link = item['link']
to_index['picture'] = link
break
def prepare_node_data(node_id: str, node: dict=None) -> dict:
"""Given a node id or a node document, return an indexable version of it.
@@ -60,30 +86,25 @@ def prepare_node_data(node_id: str, node: dict=None) -> dict:
users_collection = current_app.data.driver.db['users']
user = users_collection.find_one({'_id': ObjectId(node['user'])})
clean_description = bleach.clean(node.get('_description_html') or '', strip=True)
if not clean_description and node['node_type'] == 'post':
clean_description = bleach.clean(node['properties'].get('_content_html') or '', strip=True)
to_index = {
'objectID': node['_id'],
'name': node['name'],
'project': {
'_id': project['_id'],
'name': project['name'],
'url': project['url'],
},
'created': node['_created'],
'updated': node['_updated'],
'node_type': node['node_type'],
'picture': node.get('picture') or '',
'user': {
'_id': user['_id'],
'full_name': user['full_name']
},
'description': clean_description or None,
'description': node.get('description'),
'is_free': False
}
_handle_picture(node, to_index)
# If the node has world permissions, compute the Free permission
if 'world' in node.get('permissions', {}):
if 'GET' in node['permissions']['world']:

View File

@@ -1,9 +1,7 @@
import collections
import copy
import datetime
import json
import logging
from pathlib import PurePosixPath, Path
from pathlib import PurePosixPath
import re
import typing
@@ -14,7 +12,6 @@ from flask_script import Manager
import pymongo
from pillar import current_app
import pillar.api.utils
# Collections to skip when finding file references (during orphan file detection).
# This collection can be added to from PillarExtension.setup_app().
@@ -562,6 +559,50 @@ def replace_pillar_node_type_schemas(project_url=None, all_projects=False, missi
projects_changed, projects_seen)
@manager_maintenance.command
def remarkdown_comments():
"""Retranslates all Markdown to HTML for all comment nodes.
"""
from pillar.api.nodes import convert_markdown
nodes_collection = current_app.db()['nodes']
comments = nodes_collection.find({'node_type': 'comment'},
projection={'properties.content': 1,
'node_type': 1})
updated = identical = skipped = errors = 0
for node in comments:
convert_markdown(node)
node_id = node['_id']
try:
content_html = node['properties']['content_html']
except KeyError:
log.warning('Node %s has no content_html', node_id)
skipped += 1
continue
result = nodes_collection.update_one(
{'_id': node_id},
{'$set': {'properties.content_html': content_html}}
)
if result.matched_count != 1:
log.error('Unable to update node %s', node_id)
errors += 1
continue
if result.modified_count:
updated += 1
else:
identical += 1
log.info('updated : %i', updated)
log.info('identical: %i', identical)
log.info('skipped : %i', skipped)
log.info('errors : %i', errors)
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
@@ -739,6 +780,113 @@ def iter_markdown(proj_node_types: dict, some_node: dict, callback: typing.Calla
doc[key] = new_value
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
help='Replace on all projects.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def upgrade_attachment_usage(proj_url=None, all_projects=False, go=False):
"""Replaces '@[slug]' with '{attachment slug}'.
Also moves links from the attachment dict to the attachment shortcode.
"""
if bool(proj_url) == all_projects:
log.error('Use either --project or --all.')
return 1
import html
from pillar.api.projects.utils import node_type_dict
from pillar.api.utils import remove_private_keys
from pillar.api.utils.authentication import force_cli_user
force_cli_user()
nodes_coll = current_app.db('nodes')
total_nodes = 0
failed_node_ids = set()
# Use a mixture of the old slug RE that still allows spaces in the slug
# name and the new RE that allows dashes.
old_slug_re = re.compile(r'@\[([a-zA-Z0-9_\- ]+)\]')
for proj in _db_projects(proj_url, all_projects, go=go):
proj_id = proj['_id']
proj_url = proj.get('url', '-no-url-')
nodes = nodes_coll.find({
'_deleted': {'$ne': True},
'project': proj_id,
'properties.attachments': {'$exists': True},
})
node_count = nodes.count()
if node_count == 0:
log.debug('Skipping project %s (%s)', proj_url, proj_id)
continue
proj_node_types = node_type_dict(proj)
for node in nodes:
attachments = node['properties']['attachments']
replaced = False
# Inner functions because of access to the node's attachments.
def replace(match):
nonlocal replaced
slug = match.group(1)
log.debug(' - OLD STYLE attachment slug %r', slug)
try:
att = attachments[slug]
except KeyError:
log.info("Attachment %r not found for node %s", slug, node['_id'])
link = ''
else:
link = att.get('link', '')
if link == 'self':
link = " link='self'"
elif link == 'custom':
url = att.get('link_custom')
if url:
link = " link='%s'" % html.escape(url)
replaced = True
return '{attachment %r%s}' % (slug.replace(' ', '-'), link)
def update_markdown(value: str) -> str:
return old_slug_re.sub(replace, value)
iter_markdown(proj_node_types, node, update_markdown)
# Remove no longer used properties from attachments
new_attachments = {}
for slug, attachment in attachments.items():
replaced |= 'link' in attachment # link_custom implies link
attachment.pop('link', None)
attachment.pop('link_custom', None)
new_attachments[slug.replace(' ', '-')] = attachment
node['properties']['attachments'] = new_attachments
if replaced:
total_nodes += 1
else:
# Nothing got replaced,
continue
if go:
# Use Eve to PUT, so we have schema checking.
db_node = remove_private_keys(node)
r, _, _, status = current_app.put_internal('nodes', db_node, _id=node['_id'])
if status != 200:
log.error('Error %i storing altered node %s %s', status, node['_id'], r)
failed_node_ids.add(node['_id'])
# raise SystemExit('Error storing node; see log.')
log.debug('Updated node %s: %s', node['_id'], r)
log.info('Project %s (%s) has %d nodes with attachments',
proj_url, proj_id, node_count)
log.info('%s %d nodes', 'Updated' if go else 'Would update', total_nodes)
if failed_node_ids:
log.warning('Failed to update %d of %d nodes: %s', len(failed_node_ids), total_nodes,
', '.join(str(nid) for nid in failed_node_ids))
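A worked example of the conversion this command performs (the node data is hypothetical, the transformation follows the code above):
    # Before: node markdown contains '@[my slug]' and
    #   properties.attachments == {'my slug': {'link': 'custom', 'link_custom': 'https://example.com'}}
    # After:  markdown contains "{attachment 'my-slug' link='https://example.com'}" and
    #   properties.attachments == {'my-slug': {}}  # link/link_custom now live in the shortcode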
def _db_projects(proj_url: str, all_projects: bool, project_id='', *, go: bool) \
-> typing.Iterable[dict]:
"""Yields a subset of the projects in the database.
@@ -778,12 +926,25 @@ def _db_projects(proj_url: str, all_projects: bool, project_id='', *, go: bool)
log.info('Command took %s', duration)
def find_object_ids(something: typing.Any) -> typing.Iterable[bson.ObjectId]:
def _find_orphan_files() -> typing.Set[bson.ObjectId]:
"""Generator, yields all ObjectIDs referenced by the given object.
"""Finds all non-referenced files for the given project.
Assumes 'something' comes from a MongoDB. This function wasn't made for
generic Python objects.
Returns an iterable of all orphan file IDs.
"""
log.debug('Finding orphan files')
# Get all file IDs that belong to this project.
files_coll = current_app.db('files')
cursor = files_coll.find({'_deleted': {'$ne': True}}, projection={'_id': 1})
file_ids = {doc['_id'] for doc in cursor}
if not file_ids:
log.debug('No files found')
return set()
total_file_count = len(file_ids)
log.debug('Found %d files in total', total_file_count)
def find_object_ids(something: typing.Any) -> typing.Iterable[bson.ObjectId]:
if isinstance(something, bson.ObjectId):
yield something
elif isinstance(something, str) and len(something) == 24:
@@ -796,30 +957,9 @@ def find_object_ids(something: typing.Any) -> typing.Iterable[bson.ObjectId]:
for item in something:
yield from find_object_ids(item)
elif isinstance(something, dict):
for item in something.keys():
yield from find_object_ids(item)
for item in something.values():
yield from find_object_ids(item)
def _find_orphan_files() -> typing.Set[bson.ObjectId]:
"""Finds all non-referenced files.
Returns an iterable of all orphan file IDs.
"""
log.debug('Finding orphan files')
# Get all file IDs and make a set; we'll remove any referenced object ID later.
files_coll = current_app.db('files')
cursor = files_coll.find({'_deleted': {'$ne': True}}, projection={'_id': 1})
file_ids = {doc['_id'] for doc in cursor}
if not file_ids:
log.debug('No files found')
return set()
total_file_count = len(file_ids)
log.debug('Found %d files in total', total_file_count)
# Find all references by iterating through the project itself and every document that has a
# 'project' key set to this ObjectId.
db = current_app.db()
@@ -849,6 +989,7 @@ def find_orphan_files():
This is a heavy operation that inspects *everything* in MongoDB. Use with care.
"""
from jinja2.filters import do_filesizeformat
from pathlib import Path
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'orphan-files.txt'
if output_fpath.exists():
@@ -894,6 +1035,7 @@ def delete_orphan_files():
Use 'find_orphan_files' first to generate orphan-files.txt.
"""
import pymongo.results
from pathlib import Path
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'orphan-files.txt'
with output_fpath.open('r', encoding='ascii') as infile:
@@ -924,412 +1066,3 @@ def delete_orphan_files():
log.warning('Soft-deletion modified %d of %d files', res.modified_count, file_count)
log.info('%d files have been soft-deleted', res.modified_count)
@manager_maintenance.command
def find_video_files_without_duration():
"""Finds video files without any duration
This is a heavy operation. Use with care.
"""
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'video_files_without_duration.txt'
if output_fpath.exists():
log.error('Output filename %s already exists, remove it first.', output_fpath)
return 1
start_timestamp = datetime.datetime.now()
files_coll = current_app.db('files')
starts_with_video = re.compile("^video", re.IGNORECASE)
aggr = files_coll.aggregate([
{'$match': {'content_type': starts_with_video,
'_deleted': {'$ne': True}}},
{'$unwind': '$variations'},
{'$match': {
'variations.duration': {'$not': {'$gt': 0}}
}},
{'$project': {'_id': 1}}
])
file_ids = [str(f['_id']) for f in aggr]
nbr_files = len(file_ids)
log.info('Total nbr video files without duration: %d', nbr_files)
end_timestamp = datetime.datetime.now()
duration = end_timestamp - start_timestamp
log.info('Finding files took %s', duration)
log.info('Writing Object IDs to %s', output_fpath)
with output_fpath.open('w', encoding='ascii') as outfile:
outfile.write('\n'.join(sorted(file_ids)))
@manager_maintenance.command
def find_video_nodes_without_duration():
"""Finds video nodes without any duration
This is a heavy operation. Use with care.
"""
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'video_nodes_without_duration.txt'
if output_fpath.exists():
log.error('Output filename %s already exists, remove it first.', output_fpath)
return 1
start_timestamp = datetime.datetime.now()
nodes_coll = current_app.db('nodes')
aggr = nodes_coll.aggregate([
{'$match': {'node_type': 'asset',
'properties.content_type': 'video',
'_deleted': {'$ne': True},
'properties.duration_seconds': {'$not': {'$gt': 0}}}},
{'$project': {'_id': 1}}
])
file_ids = [str(f['_id']) for f in aggr]
nbr_files = len(file_ids)
log.info('Total nbr video nodes without duration: %d', nbr_files)
end_timestamp = datetime.datetime.now()
duration = end_timestamp - start_timestamp
log.info('Finding nodes took %s', duration)
log.info('Writing Object IDs to %s', output_fpath)
with output_fpath.open('w', encoding='ascii') as outfile:
outfile.write('\n'.join(sorted(file_ids)))
@manager_maintenance.option('-n', '--nodes', dest='nodes_to_update', nargs='*',
help='List of nodes to update')
@manager_maintenance.option('-a', '--all', dest='all_nodes', action='store_true', default=False,
help='Update on all video nodes.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def reconcile_node_video_duration(nodes_to_update=None, all_nodes=False, go=False):
"""Copy video duration from file.variations.duration to node.properties.duration_seconds.
This is a heavy operation. Use with care.
"""
from pillar.api.utils import random_etag, utcnow
if bool(nodes_to_update) == all_nodes:
log.error('Use either --nodes or --all.')
return 1
start_timestamp = datetime.datetime.now()
nodes_coll = current_app.db('nodes')
node_subset = []
if nodes_to_update:
node_subset = [{'$match': {'_id': {'$in': [ObjectId(nid) for nid in nodes_to_update]}}}]
files = nodes_coll.aggregate(
[
*node_subset,
{'$match': {
'node_type': 'asset',
'properties.content_type': 'video',
'_deleted': {'$ne': True}}
},
{'$lookup': {
'from': 'files',
'localField': 'properties.file',
'foreignField': '_id',
'as': '_files',
}},
{'$unwind': '$_files'},
{'$unwind': '$_files.variations'},
{'$match': {'_files.variations.duration': {'$gt': 0}}},
{'$addFields': {
'need_update': {
'$ne': ['$_files.variations.duration', '$properties.duration_seconds']}
}},
{'$match': {'need_update': True}},
{'$project': {
'_id': 1,
'duration': '$_files.variations.duration',
}}]
)
if not go:
log.info('Would try to update %d nodes', len(list(files)))
return 0
modified_count = 0
for f in files:
log.debug('Updating node %s with duration %d', f['_id'], f['duration'])
new_etag = random_etag()
now = utcnow()
resp = nodes_coll.update_one(
{'_id': f['_id']},
{'$set': {
'properties.duration_seconds': f['duration'],
'_etag': new_etag,
'_updated': now,
}}
)
if resp.modified_count == 0:
log.debug('Node %s was already up to date', f['_id'])
modified_count += resp.modified_count
log.info('Updated %d nodes', modified_count)
end_timestamp = datetime.datetime.now()
duration = end_timestamp - start_timestamp
log.info('Operation took %s', duration)
return 0
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def delete_projectless_files(go=False):
"""Soft-deletes file documents of projects that have been deleted.
WARNING: this also soft-deletes file documents that do not have a project
property at all.
"""
start_timestamp = datetime.datetime.now()
files_coll = current_app.db('files')
aggr = files_coll.aggregate([
{'$match': {'_deleted': {'$ne': True}}},
{'$lookup': {
'from': 'projects',
'localField': 'project',
'foreignField': '_id',
'as': '_project'
}},
{'$match': {'$or': [
{'_project': []},
{'_project._deleted': True},
]}},
{'$project': {'_id': True}},
])
files_to_delete: typing.List[ObjectId] = [doc['_id'] for doc in aggr]
orphan_count = len(files_to_delete)
log.info('Total number of files to soft-delete: %d', orphan_count)
total_count = files_coll.count_documents({'_deleted': {'$ne': True}})
log.info('Total nr of orphan files: %d', orphan_count)
log.info('Total nr of files : %d', total_count)
log.info('Orphan percentage : %d%%', 100 * orphan_count / total_count)
if go:
log.info('Soft-deleting all %d projectless files', orphan_count)
now = pillar.api.utils.utcnow()
etag = pillar.api.utils.random_etag()
result = files_coll.update_many(
{'_id': {'$in': files_to_delete}},
{'$set': {
'_deleted': True,
'_updated': now,
'_etag': etag,
}},
)
log.info('Matched count: %d', result.matched_count)
log.info('Modified count: %d', result.modified_count)
end_timestamp = datetime.datetime.now()
duration = end_timestamp - start_timestamp
if go:
verb = 'Soft-deleting'
else:
verb = 'Finding'
log.info('%s orphans took %s', verb, duration)
@manager_maintenance.command
def find_projects_for_files():
"""For file documents without project, tries to find in which project files are used.
This is a heavy operation that inspects *everything* in MongoDB. Use with care.
"""
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'files-without-project.json'
if output_fpath.exists():
log.error('Output filename %s already exists, remove it first.', output_fpath)
return 1
start_timestamp = datetime.datetime.now()
log.info('Finding files to fix...')
files_coll = current_app.db('files')
query = {'project': {'$exists': False},
'_deleted': {'$ne': True}}
files_to_fix = {file_doc['_id']: None for file_doc in files_coll.find(query)}
if not files_to_fix:
log.info('No files without projects found, congratulations.')
return 0
# Find all references by iterating through every node and project, and
# hoping that they reference the file.
projects_coll = current_app.db('projects')
existing_projects: typing.MutableSet[ObjectId] = set()
for doc in projects_coll.find():
project_id = doc['_id']
existing_projects.add(project_id)
for obj_id in find_object_ids(doc):
if obj_id not in files_to_fix:
continue
files_to_fix[obj_id] = project_id
nodes_coll = current_app.db('nodes')
for doc in nodes_coll.find():
project_id = doc.get('project')
if not project_id:
log.warning('Skipping node %s, as it is not part of any project', doc['_id'])
continue
if project_id not in existing_projects:
log.warning('Skipping node %s, as its project %s does not exist',
doc['_id'], project_id)
continue
for obj_id in find_object_ids(doc):
if obj_id not in files_to_fix:
continue
files_to_fix[obj_id] = project_id
orphans = {oid for oid, project_id in files_to_fix.items()
if project_id is None}
fixable = {str(oid): str(project_id)
for oid, project_id in files_to_fix.items()
if project_id is not None}
log.info('Total nr of orphan files : %d', len(orphans))
log.info('Total nr of fixable files: %d', len(fixable))
projects = set(fixable.values())
log.info('Fixable project count : %d', len(projects))
for project_id in projects:
project = projects_coll.find_one(ObjectId(project_id))
log.info(' - %40s /p/%-20s created on %s, ',
project['name'], project['url'], project['_created'])
end_timestamp = datetime.datetime.now()
duration = end_timestamp - start_timestamp
log.info('Finding projects took %s', duration)
log.info('Writing {file_id: project_id} mapping to %s', output_fpath)
with output_fpath.open('w', encoding='ascii') as outfile:
json.dump(fixable, outfile, indent=4, sort_keys=True)
@manager_maintenance.option('filepath', type=Path,
help='JSON file produced by find_projects_for_files')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def fix_projects_for_files(filepath: Path, go=False):
"""Assigns file documents to projects.
Use 'manage.py maintenance find_projects_for_files` to produce the JSON
file that contains the file ID to project ID mapping.
"""
log.info('Loading %s', filepath)
with filepath.open('r', encoding='ascii') as infile:
mapping: typing.Mapping[str, str] = json.load(infile)
# Group IDs per project for more efficient querying.
log.info('Grouping per project')
project_to_file_ids: typing.Mapping[ObjectId, typing.List[ObjectId]] = \
collections.defaultdict(list)
for file_id, project_id in mapping.items():
project_to_file_ids[ObjectId(project_id)].append(ObjectId(file_id))
MockUpdateResult = collections.namedtuple('MockUpdateResult', 'matched_count modified_count')
files_coll = current_app.db('files')
total_matched = total_modified = 0
for project_oid, file_oids in project_to_file_ids.items():
query = {'_id': {'$in': file_oids}}
if go:
result = files_coll.update_many(query, {'$set': {'project': project_oid}})
else:
found = files_coll.count_documents(query)
result = MockUpdateResult(found, 0)
total_matched += result.matched_count
total_modified += result.modified_count
if result.matched_count != len(file_oids):
log.warning('Matched only %d of %d files; modified %d; for project %s',
result.matched_count, len(file_oids), result.modified_count, project_oid)
else:
log.info('Matched all %d files; modified %d; for project %s',
result.matched_count, result.modified_count, project_oid)
log.info('Done updating %d files (found %d, modified %d) on %d projects',
len(mapping), total_matched, total_modified, len(project_to_file_ids))
@manager_maintenance.option('-u', '--user', dest='user', nargs='?',
help='Update subscriptions for single user.')
@manager_maintenance.option('-o', '--object', dest='context_object', nargs='?',
help='Update subscriptions for context_object.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
help='Actually perform the changes (otherwise just show as dry-run).')
def fix_missing_activities_subscription_defaults(user=None, context_object=None, go=False):
"""Assign default values to activities-subscriptions documents where values are missing.
"""
subscriptions_collection = current_app.db('activities-subscriptions')
lookup_is_subscribed = {
'is_subscribed': {'$exists': False},
}
lookup_notifications = {
'notifications.web': {'$exists': False},
}
if user:
lookup_is_subscribed['user'] = ObjectId(user)
lookup_notifications['user'] = ObjectId(user)
if context_object:
lookup_is_subscribed['context_object'] = ObjectId(context_object)
lookup_notifications['context_object'] = ObjectId(context_object)
num_need_is_subscribed_update = subscriptions_collection.count(lookup_is_subscribed)
log.info("Found %d documents that need 'is_subscribed' updated", num_need_is_subscribed_update)
num_need_notification_web_update = subscriptions_collection.count(lookup_notifications)
log.info("Found %d documents that need 'notifications.web' updated", num_need_notification_web_update)
if not go:
return
if num_need_is_subscribed_update > 0:
log.info("Updating 'is_subscribed'")
resp = subscriptions_collection.update(
lookup_is_subscribed,
{
'$set': {'is_subscribed': True}
},
multi=True,
upsert=False
)
if resp['nModified'] != num_need_is_subscribed_update:
log.warning("Expected %d documents to be updated, was %d",
num_need_is_subscribed_update, resp['nModified'])
if num_need_notification_web_update > 0:
log.info("Updating 'notifications.web'")
resp = subscriptions_collection.update(
lookup_notifications,
{
'$set': {'notifications.web': True}
},
multi=True,
upsert=False
)
if resp['nModified'] != num_need_notification_web_update:
log.warning("Expected %d documents to be updated, was %d",
num_need_notification_web_update, resp['nModified'])
log.info("Done updating 'activities-subscriptions' documents")
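Assuming the usual Flask-Script wiring of `manager_maintenance` under `manage.py` (as referenced in the docstrings above), these commands would be invoked along these lines:
    python manage.py maintenance fix_missing_activities_subscription_defaults --user <user_id> --go
    python manage.py maintenance reconcile_node_video_duration --all --go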

View File

@@ -195,7 +195,7 @@ BLENDER_CLOUD_ADDON_VERSION = '1.4'
TLS_CERT_FILE = requests.certs.where()
CELERY_BACKEND = 'redis://redis/1'
CELERY_BROKER = 'redis://redis/2'
CELERY_BROKER = 'amqp://guest:guest@rabbit//'
# This configures the Celery task scheduler in such a way that we don't
# have to import the pillar.celery.XXX modules. Remember to run
@@ -208,8 +208,8 @@ CELERY_BEAT_SCHEDULE = {
},
'refresh-blenderid-badges': {
'task': 'pillar.celery.badges.sync_badges_for_users',
'schedule': 10 * 60,  # every N seconds
'schedule': 600,  # every N seconds
'args': (9 * 60, ),  # time limit in seconds, keep shorter than 'schedule'
'args': (540, ),  # time limit in seconds, keep shorter than 'schedule'
}
}
@@ -270,14 +270,3 @@ STATIC_FILE_HASH = ''
# all API endpoints do not need it. On the views that require it, we use the
# current_app.csrf.protect() method.
WTF_CSRF_CHECK_DEFAULT = False
# Flask Debug Toolbar. Enable it by overriding DEBUG_TB_ENABLED in config_local.py.
DEBUG_TB_ENABLED = False
DEBUG_TB_PANELS = [
'flask_debugtoolbar.panels.versions.VersionDebugPanel',
'flask_debugtoolbar.panels.headers.HeaderDebugPanel',
'flask_debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
'flask_debugtoolbar.panels.config_vars.ConfigVarsDebugPanel',
'flask_debugtoolbar.panels.template.TemplateDebugPanel',
'flask_debugtoolbar.panels.logger.LoggingPanel',
'flask_debugtoolbar.panels.route_list.RouteListDebugPanel']

View File

@@ -163,11 +163,8 @@ class YouTube:
return html_module.escape('{youtube invalid YouTube ID/URL}')
src = f'https://www.youtube.com/embed/{youtube_id}?rel=0'
html = f'<div class="embed-responsive embed-responsive-16by9">' \
f'<iframe class="shortcode youtube embed-responsive-item"' \
f' width="{width}" height="{height}" src="{src}"' \
f' frameborder="0" allow="autoplay; encrypted-media" allowfullscreen></iframe>' \
f'</div>'
html = f'<iframe class="shortcode youtube" width="{width}" height="{height}" src="{src}"' \
f' frameborder="0" allow="autoplay; encrypted-media" allowfullscreen></iframe>'
return html
@@ -228,25 +225,12 @@ class Attachment:
return self.render(file_doc, pargs, kwargs)
def sdk_file(self, slug: str, document: dict) -> pillarsdk.File:
"""Return the file document for the attachment with this slug."""
from pillar.web import system_util
# TODO (fsiddi) Make explicit what 'document' is.
# In some cases we pass the entire node or project documents, in other cases
# we pass node.properties. This should be unified at the level of do_markdown.
# For now we do a quick hack and first look for 'properties' in the doc,
# then we look for 'attachments'.
doc_properties = document.get('properties')
if doc_properties:
# We passed an entire document (all nodes must have 'properties')
attachments = doc_properties.get('attachments', {})
else:
# The value of document could have been defined as 'node.properties'
attachments = document.get('attachments', {})
def sdk_file(self, slug: str, node_properties: dict) -> pillarsdk.File:
"""Return the file document for the attachment with this slug."""
from pillar.web import system_util
attachments = node_properties.get('properties', {}).get('attachments', {})
attachment = attachments.get(slug)
if not attachment:
raise self.NoSuchSlug(slug)

View File

@@ -174,10 +174,6 @@ class AbstractPillarTest(TestMinimal):
for modname in remove:
del sys.modules[modname]
def url_for(self, endpoint, **values):
with self.app.app_context():
return flask.url_for(endpoint, **values)
def ensure_file_exists(self, file_overrides=None, *, example_file=None) -> (ObjectId, dict):
if example_file is None:
example_file = ctd.EXAMPLE_FILE
@@ -355,15 +351,13 @@ class AbstractPillarTest(TestMinimal):
# TODO: rename to 'create_auth_token' now that 'expire_in_days' can be negative.
def create_valid_auth_token(self,
user_id: typing.Union[str, ObjectId],
user_id: ObjectId,
token='token',
*,
oauth_scopes: typing.Optional[typing.List[str]]=None,
expire_in_days=1) -> dict:
from pillar.api.utils import utcnow
if isinstance(user_id, str):
user_id = ObjectId(user_id)
future = utcnow() + datetime.timedelta(days=expire_in_days)
with self.app.test_request_context():

View File

@@ -1,7 +1,6 @@
"""Our custom Jinja filters and other template stuff."""
import functools
import json
import logging
import typing
import urllib.parse
@@ -14,8 +13,6 @@ import werkzeug.exceptions as wz_exceptions
import pillarsdk
import pillar.api.utils
from pillar import auth
from pillar.api.utils import pretty_duration
from pillar.web.utils import pretty_date
from pillar.web.nodes.routes import url_for_node
import pillar.markdown
@@ -31,14 +28,6 @@ def format_pretty_date_time(d):
return pretty_date(d, detail=True)
def format_pretty_duration(s):
return pretty_duration(s)
def format_pretty_duration_fractional(s):
return pillar.api.utils.pretty_duration_fractional(s)
def format_undertitle(s):
"""Underscore-replacing title filter.
@@ -211,32 +200,9 @@ def do_yesno(value, arg=None):
return no
def user_to_dict(user: auth.UserClass) -> dict:
return dict(
user_id=str(user.user_id),
username=user.username,
full_name=user.full_name,
gravatar=user.gravatar,
email=user.email,
capabilities=list(user.capabilities),
badges_html=user.badges_html,
is_authenticated=user.is_authenticated
)
def do_json(some_object) -> str:
if isinstance(some_object, pillarsdk.Resource):
some_object = some_object.to_dict()
if isinstance(some_object, auth.UserClass):
some_object = user_to_dict(some_object)
return json.dumps(some_object)
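In a template this enables dumping server-side objects straight into a script block, e.g. (sketch only; `current_user` is provided by Flask-Login, and Jinja's `safe` filter is needed because `do_json` returns raw JSON): <script>const user = {{ current_user | json | safe }};</script>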
def setup_jinja_env(jinja_env, app_config: dict):
jinja_env.filters['pretty_date'] = format_pretty_date
jinja_env.filters['pretty_date_time'] = format_pretty_date_time
jinja_env.filters['pretty_duration'] = format_pretty_duration
jinja_env.filters['pretty_duration_fractional'] = format_pretty_duration_fractional
jinja_env.filters['undertitle'] = format_undertitle
jinja_env.filters['hide_none'] = do_hide_none
jinja_env.filters['pluralize'] = do_pluralize
@@ -246,7 +212,6 @@ def setup_jinja_env(jinja_env, app_config: dict):
jinja_env.filters['yesno'] = do_yesno
jinja_env.filters['repr'] = repr
jinja_env.filters['urljoin'] = functools.partial(urllib.parse.urljoin, allow_fragments=True)
jinja_env.filters['json'] = do_json
jinja_env.globals['url_for_node'] = do_url_for_node
jinja_env.globals['abs_url'] = functools.partial(flask.url_for,
_external=True,

View File

@@ -0,0 +1,236 @@
import logging
from flask import current_app
from flask import request
from flask import jsonify
from flask import render_template
from flask_login import login_required, current_user
from pillarsdk import Node
from pillarsdk import Project
import werkzeug.exceptions as wz_exceptions
from pillar.api.utils import utcnow
from pillar.web import subquery
from pillar.web.nodes.routes import blueprint
from pillar.web.utils import gravatar
from pillar.web.utils import pretty_date
from pillar.web.utils import system_util
log = logging.getLogger(__name__)
@blueprint.route('/comments/create', methods=['POST'])
@login_required
def comments_create():
content = request.form['content']
parent_id = request.form.get('parent_id')
if not parent_id:
log.warning('User %s tried to create comment without parent_id', current_user.objectid)
raise wz_exceptions.UnprocessableEntity()
api = system_util.pillar_api()
parent_node = Node.find(parent_id, api=api)
if not parent_node:
log.warning('Unable to create comment for user %s, parent node %r not found',
current_user.objectid, parent_id)
raise wz_exceptions.UnprocessableEntity()
log.info('Creating comment for user %s on parent node %r',
current_user.objectid, parent_id)
comment_props = dict(
project=parent_node.project,
name='Comment',
user=current_user.objectid,
node_type='comment',
properties=dict(
content=content,
status='published',
confidence=0,
rating_positive=0,
rating_negative=0))
if parent_id:
comment_props['parent'] = parent_id
# Get the parent node and check if it's a comment; in that case we flag
# the current comment as a reply.
parent_node = Node.find(parent_id, api=api)
if parent_node.node_type == 'comment':
comment_props['properties']['is_reply'] = True
comment = Node(comment_props)
comment.create(api=api)
return jsonify({'node_id': comment._id}), 201
@blueprint.route('/comments/<string(length=24):comment_id>', methods=['POST'])
@login_required
def comment_edit(comment_id):
"""Allows a user to edit their comment."""
from pillar.web import jinja
api = system_util.pillar_api()
comment = Node({'_id': comment_id})
result = comment.patch({'op': 'edit', 'content': request.form['content']}, api=api)
assert result['_status'] == 'OK'
return jsonify({
'status': 'success',
'data': {
'content': result.properties.content or '',
'content_html': jinja.do_markdowned(result.properties, 'content'),
}})
def format_comment(comment, is_reply=False, is_team=False, replies=None):
"""Format a comment node into a simpler dictionary.
:param comment: the comment object
:param is_reply: True if the comment is a reply to another comment
:param is_team: True if the author belongs to the group that owns the node
:param replies: list of replies (formatted with this function)
"""
try:
is_own = (current_user.objectid == comment.user._id) \
if current_user.is_authenticated else False
except AttributeError:
current_app.bugsnag.notify(Exception(
'Missing user for embedded user ObjectId'),
meta_data={'nodes_info': {'node_id': comment['_id']}})
return
is_rated = False
is_rated_positive = None
if comment.properties.ratings:
for rating in comment.properties.ratings:
if current_user.is_authenticated and rating.user == current_user.objectid:
is_rated = True
is_rated_positive = rating.is_positive
break
return dict(_id=comment._id,
gravatar=gravatar(comment.user.email, size=32),
time_published=pretty_date(comment._created or utcnow(), detail=True),
rating=comment.properties.rating_positive - comment.properties.rating_negative,
author=comment.user.full_name,
author_username=comment.user.username,
content=comment.properties.content,
is_reply=is_reply,
is_own=is_own,
is_rated=is_rated,
is_rated_positive=is_rated_positive,
is_team=is_team,
replies=replies)
@blueprint.route('/<string(length=24):node_id>/comments')
def comments_for_node(node_id):
"""Shows the comments attached to the given node.
The URL can be overridden in order to define can_post_comments in a different way
"""
api = system_util.pillar_api()
node = Node.find(node_id, api=api)
project = Project({'_id': node.project})
can_post_comments = project.node_type_has_method('comment', 'POST', api=api)
can_comment_override = request.args.get('can_comment', 'True') == 'True'
can_post_comments = can_post_comments and can_comment_override
return render_comments_for_node(node_id, can_post_comments=can_post_comments)
def render_comments_for_node(node_id: str, *, can_post_comments: bool):
"""Render the list of comments for a node."""
api = system_util.pillar_api()
# Query for all children, i.e. comments on the node.
comments = Node.all({
'where': {'node_type': 'comment', 'parent': node_id},
}, api=api)
def enrich(some_comment):
some_comment['_user'] = subquery.get_user_info(some_comment['user'])
some_comment['_is_own'] = some_comment['user'] == current_user.objectid
some_comment['_current_user_rating'] = None # tri-state boolean
some_comment[
'_rating'] = some_comment.properties.rating_positive - some_comment.properties.rating_negative
if current_user.is_authenticated:
for rating in some_comment.properties.ratings or ():
if rating.user != current_user.objectid:
continue
some_comment['_current_user_rating'] = rating.is_positive
for comment in comments['_items']:
# Query for all grandchildren, i.e. replies to comments on the node.
comment['_replies'] = Node.all({
'where': {'node_type': 'comment', 'parent': comment['_id']},
}, api=api)
enrich(comment)
for reply in comment['_replies']['_items']:
enrich(reply)
nr_of_comments = sum(1 + comment['_replies']['_meta']['total']
for comment in comments['_items'])
return render_template('nodes/custom/comment/list_embed.html',
node_id=node_id,
comments=comments,
nr_of_comments=nr_of_comments,
show_comments=True,
can_post_comments=can_post_comments)
@blueprint.route('/<string(length=24):node_id>/commentform')
def commentform_for_node(node_id):
"""Shows only the comment form for comments attached to the given node.
i.e. does not show the comments themselves, just the form to post a new comment.
"""
api = system_util.pillar_api()
node = Node.find(node_id, api=api)
project = Project({'_id': node.project})
can_post_comments = project.node_type_has_method('comment', 'POST', api=api)
return render_template('nodes/custom/comment/list_embed.html',
node_id=node_id,
show_comments=False,
can_post_comments=can_post_comments)
@blueprint.route("/comments/<comment_id>/rate/<operation>", methods=['POST'])
@login_required
def comments_rate(comment_id, operation):
"""Comment rating function
:param comment_id: the comment id
:type comment_id: str
:param operation: the rating operation: 'revoke', 'upvote' or 'downvote'
:type operation: str
"""
if operation not in {'revoke', 'upvote', 'downvote'}:
raise wz_exceptions.BadRequest('Invalid operation')
api = system_util.pillar_api()
# PATCH the node and return the result.
comment = Node({'_id': comment_id})
result = comment.patch({'op': operation}, api=api)
assert result['_status'] == 'OK'
return jsonify({
'status': 'success',
'data': {
'op': operation,
'rating_positive': result.properties.rating_positive,
'rating_negative': result.properties.rating_negative,
}})
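A client-side sketch of the rating endpoint above (host and URL prefix are assumptions; the route lives on the nodes blueprint and requires a logged-in session):
    import requests

    comment_id = '5' * 24  # placeholder 24-char ObjectId string
    url = f'https://cloud.example.com/nodes/comments/{comment_id}/rate/upvote'
    resp = requests.post(url, cookies={'session': '<session cookie>'})  # @login_required
    resp.json()  # -> {'status': 'success', 'data': {'op': 'upvote', ...}}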

View File

@@ -19,7 +19,6 @@ from pillar.web.nodes.routes import url_for_node
from pillar.web.nodes.forms import get_node_form
import pillar.web.nodes.attachments
from pillar.web.projects.routes import project_update_nodes_list
-from pillar.web.projects.routes import project_navigation_links

log = logging.getLogger(__name__)
@@ -62,10 +61,16 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
        post.picture = get_file(post.picture, api=api)
        post.url = url_for_node(node=post)

-    main_project_template = '_main_project'
+    # Use the *_main_project.html template for the main blog
+    is_main_project = project_id == current_app.config['MAIN_PROJECT_ID']
+    main_project_template = '_main_project' if is_main_project else ''
    index_arch = 'archive' if archive else 'index'
-    template_path = f'nodes/custom/blog/{index_arch}.html',
+    template_path = f'nodes/custom/blog/{index_arch}{main_project_template}.html',

    if url:
+        template_path = f'nodes/custom/post/view{main_project_template}.html',
        post = Node.find_one({
            'where': {'parent': blog._id, 'properties.url': url},
            'embedded': {'node_type': 1, 'user': 1},
@@ -90,7 +95,6 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
    can_create_blog_posts = project.node_type_has_method('post', 'POST', api=api)

    # Use functools.partial so we can later pass page=X.
-    is_main_project = project_id == current_app.config['MAIN_PROJECT_ID']
    if is_main_project:
        url_func = functools.partial(url_for, 'main.main_blog_archive')
    else:
@@ -108,21 +112,24 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
    else:
        project.blog_archive_prev = None

-    navigation_links = project_navigation_links(project, api)
-    extension_sidebar_links = current_app.extension_sidebar_links(project)
+    title = 'blog_main' if is_main_project else 'blog'
+
+    pages = Node.all({
+        'where': {'project': project._id, 'node_type': 'page'},
+        'projection': {'name': 1}}, api=api)

    return render_template(
        template_path,
        blog=blog,
-        node=post,  # node is used by the generic comments rendering (see custom/_scripts.pug)
+        node=post,
        posts=posts._items,
        posts_meta=pmeta,
        more_posts_available=pmeta['total'] > pmeta['max_results'],
        project=project,
+        title=title,
        node_type_post=project.get_node_type('post'),
        can_create_blog_posts=can_create_blog_posts,
-        navigation_links=navigation_links,
-        extension_sidebar_links=extension_sidebar_links,
+        pages=pages._items,
        api=api)

View File

@@ -48,12 +48,9 @@ def find_for_comment(project, node):
            continue

        try:
-            parent = Node.find_one({'where': {
-                '_id': parent.parent,
-                '_deleted': {'$ne': True}
-            }}, api=api)
+            parent = Node.find(parent.parent, api=api)
        except ResourceNotFound:
-            log.debug(
+            log.warning(
                'url_for_node(node_id=%r): Unable to find parent node %r',
                node['_id'], parent.parent)
            raise ValueError('Unable to find parent node %r' % parent.parent)

View File

@@ -50,7 +50,6 @@ def iter_node_properties(node_type):
@functools.lru_cache(maxsize=1)
def tag_choices() -> typing.List[typing.Tuple[str, str]]:
    """Return (value, label) tuples for the NODE_TAGS config setting."""
-    #TODO(fsiddi) consider allowing tags based on custom_properties in the project.
    tags = current_app.config.get('NODE_TAGS') or []
    return [(tag, tag.title()) for tag in tags]  # (value, label) tuples
@@ -71,7 +70,9 @@ def add_form_properties(form_class, node_type):
        # Recursive call if detects a dict
        field_type = schema_prop['type']

-        if field_type == 'dict':
+        if prop_name == 'tags' and field_type == 'list':
+            field = SelectMultipleField(choices=tag_choices())
+        elif field_type == 'dict':
            assert prop_name == 'attachments'
            field = attachments.attachment_form_group_create(schema_prop)
        elif field_type == 'list':

View File

@@ -1,9 +1,9 @@
import os
+import json
import logging
from datetime import datetime

import pillarsdk
-from pillar import shortcodes
from pillarsdk import Node
from pillarsdk import Project
from pillarsdk.exceptions import ResourceNotFound
@@ -17,12 +17,15 @@ from flask import request
from flask import jsonify
from flask import abort
from flask_login import current_user
-from flask_wtf.csrf import validate_csrf
import werkzeug.exceptions as wz_exceptions
from wtforms import SelectMultipleField
from flask_login import login_required
from jinja2.exceptions import TemplateNotFound

+from pillar.api.utils.authorization import check_permissions
+from pillar.web.utils import caching
from pillar.markdown import markdown
from pillar.web.nodes.forms import get_node_form
from pillar.web.nodes.forms import process_node_form
@@ -105,11 +108,6 @@ def view(node_id, extra_template_args: dict=None):
    node_type_name = node.node_type

-    if node_type_name == 'page':
-        # HACK: The 'edit node' page GETs this endpoint, but for pages it's plain wrong,
-        # so we just redirect to the correct URL.
-        return redirect(url_for_node(node=node))
-
    if node_type_name == 'post' and not request.args.get('embed'):
        # Posts shouldn't be shown at this route (unless viewed embedded, typically
        # after an edit. Redirect to the correct one.
@@ -489,14 +487,11 @@ def preview_markdown():
    current_app.csrf.protect()

    try:
-        content = request.json['content']
+        content = request.form['content']
    except KeyError:
        return jsonify({'_status': 'ERR',
                        'message': 'The field "content" was not specified.'}), 400
-    html = markdown(content)
-    attachmentsdict = request.json.get('attachments', {})
-    html = shortcodes.render_commented(html, context={'attachments': attachmentsdict})
-    return jsonify(content=html)
+    return jsonify(content=markdown(content))


def ensure_lists_exist_as_empty(node_doc, node_type):
@@ -609,94 +604,5 @@ def url_for_node(node_id=None, node=None):
    return finders.find_url_for_node(node)
@blueprint.route("/<node_id>/breadcrumbs")
def breadcrumbs(node_id: str):
"""Return breadcrumbs for the given node, as JSON.
Note that a missing parent is still returned in the breadcrumbs,
but with `{_exists: false, name: '-unknown-'}`.
The breadcrumbs start with the top-level parent, and end with the node
itself (marked by {_self: true}). Returns JSON like this:
{breadcrumbs: [
...,
{_id: "parentID",
name: "The Parent Node",
node_type: "group",
url: "/p/project/parentID"},
{_id: "deadbeefbeefbeefbeeffeee",
_self: true,
name: "The Node Itself",
node_type: "asset",
url: "/p/project/nodeID"},
]}
When a parent node is missing, it has a breadcrumb like this:
{_id: "deadbeefbeefbeefbeeffeee",
 _exists: false,
 name: '-unknown-'}
"""
api = system_util.pillar_api()
is_self = True
def make_crumb(some_node) -> dict:
"""Construct a breadcrumb for this node."""
nonlocal is_self
crumb = {
'_id': some_node._id,
'name': some_node.name,
'node_type': some_node.node_type,
'url': finders.find_url_for_node(some_node),
}
if is_self:
crumb['_self'] = True
is_self = False
return crumb
def make_missing_crumb(some_node_id) -> dict:
"""Construct 'missing parent' breadcrumb."""
return {
'_id': some_node_id,
'_exists': False,
'name': '-unknown-',
}
# The first node MUST exist.
try:
node = Node.find(node_id, api=api)
except ResourceNotFound:
log.warning('breadcrumbs(node_id=%r): Unable to find node', node_id)
raise wz_exceptions.NotFound(f'Unable to find node {node_id}')
except ForbiddenAccess:
log.warning('breadcrumbs(node_id=%r): access denied to current user', node_id)
raise wz_exceptions.Forbidden(f'No access to node {node_id}')
crumbs = []
while True:
crumbs.append(make_crumb(node))
child_id = node._id
node_id = node.parent
if not node_id:
break
# If a subsequent node doesn't exist any more, include that in the breadcrumbs.
# Forbidden nodes are handled as if they don't exist.
try:
node = Node.find(node_id, api=api)
except (ResourceNotFound, ForbiddenAccess):
log.warning('breadcrumbs: Unable to find node %r but it is marked as parent of %r',
node_id, child_id)
crumbs.append(make_missing_crumb(node_id))
break
return jsonify({'breadcrumbs': list(reversed(crumbs))})
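# Illustrative client-side use of the endpoint above (assumes jQuery and that
# this blueprint is mounted under /nodes; renderBreadcrumbs is a hypothetical
# consumer of the JSON shape documented in the docstring):
#   $.getJSON('/nodes/' + nodeId + '/breadcrumbs')
#       .then(function(data) { renderBreadcrumbs(data.breadcrumbs); });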
# Import of custom modules (using the same nodes decorator)
-from .custom import groups, storage, posts
+from .custom import comments, groups, storage, posts

View File

@@ -30,7 +30,6 @@ class ProjectForm(FlaskForm):
                                       ('deleted', 'Deleted')])
    picture_header = FileSelectField('Picture header', file_format='image')
    picture_square = FileSelectField('Picture square', file_format='image')
-    picture_16_9 = FileSelectField('Picture 16:9', file_format='image')

    def validate(self):
        rv = FlaskForm.validate(self)

View File

@@ -303,7 +303,7 @@ def view(project_url):
                            'header_video_node': header_video_node})

-def project_navigation_links(project: typing.Type[Project], api) -> list:
+def project_navigation_links(project, api) -> list:
    """Returns a list of nodes for the project, for top navigation display.

    Args:
@@ -330,7 +330,7 @@ def project_navigation_links(project: typing.Type[Project], api) -> list:
    }, api=api)

    if blog:
-        links.append({'url': finders.find_url_for_node(blog), 'label': blog.name, 'slug': 'blog'})
+        links.append({'url': finders.find_url_for_node(blog), 'label': blog.name})

    # Fetch pages
    pages = Node.all({
@@ -343,13 +343,15 @@ def project_navigation_links(project: typing.Type[Project], api) -> list:
    # Process the results and append the links to the list
    for p in pages._items:
-        links.append({'url': finders.find_url_for_node(p), 'label': p.name, 'slug': p.properties.url})
+        links.append({'url': finders.find_url_for_node(p), 'label': p.name})

    return links

def render_project(project, api, extra_context=None, template_name=None):
-    utils.attach_project_pictures(project, api)
+    project.picture_square = utils.get_file(project.picture_square, api=api)
+    project.picture_header = utils.get_file(project.picture_header, api=api)

    def load_latest(list_of_ids, node_type=None):
        """Loads a list of IDs in reversed order."""
@@ -360,7 +362,6 @@ def render_project(project, api, extra_context=None, template_name=None):
        # Construct query parameters outside the loop.
        projection = {'name': 1, 'user': 1, 'node_type': 1, 'project': 1,
                      'properties.url': 1, 'properties.content_type': 1,
-                      'properties.duration_seconds': 1,
                      'picture': 1}
        params = {'projection': projection, 'embedded': {'user': 1}}
@@ -414,16 +415,17 @@ def render_project(project, api, extra_context=None, template_name=None):
            embed_string = ''
        template_name = "projects/view{0}.html".format(embed_string)

-    navigation_links = project_navigation_links(project, api)
    extension_sidebar_links = current_app.extension_sidebar_links(project)
+    navigation_links = project_navigation_links(project, api)

    return render_template(template_name,
                           api=api,
                           project=project,
                           node=None,
                           show_node=False,
                           show_project=True,
-                           og_picture=project.picture_16_9,
+                           og_picture=project.picture_header,
                           activity_stream=activity_stream,
                           navigation_links=navigation_links,
                           extension_sidebar_links=extension_sidebar_links,
@@ -464,7 +466,6 @@ def view_node(project_url, node_id):
    api = system_util.pillar_api()

    # First we check if it's a simple string, in which case we are looking for
    # a static page. Maybe we could use bson.objectid.ObjectId.is_valid(node_id)
-    project: typing.Optional[Project] = None
    if not utils.is_valid_id(node_id):
        # raise wz_exceptions.NotFound('No such node')
        project, node = render_node_page(project_url, node_id, api)
@@ -482,23 +483,21 @@ def view_node(project_url, node_id):
        project = Project.find_one({'where': {"url": project_url, '_id': node.project}},
                                   api=api)
    except ResourceNotFound:
+        # In theatre mode, we don't need access to the project at all.
        if theatre_mode:
-            pass  # In theatre mode, we don't need access to the project at all.
+            project = None
        else:
            raise wz_exceptions.NotFound('No such project')

-    navigation_links = []
-    extension_sidebar_links = ''
-
    og_picture = node.picture = utils.get_file(node.picture, api=api)
    if project:
-        utils.attach_project_pictures(project, api)
        if not node.picture:
-            og_picture = project.picture_16_9
+            og_picture = utils.get_file(project.picture_header, api=api)
-        navigation_links = project_navigation_links(project, api)
-        extension_sidebar_links = current_app.extension_sidebar_links(project)
+        project.picture_square = utils.get_file(project.picture_square, api=api)

    # Append _theatre to load the proper template
    theatre = '_theatre' if theatre_mode else ''
+    navigation_links = project_navigation_links(project, api)

    if node.node_type == 'page':
        return render_template('nodes/custom/page/view_embed.html',
@@ -506,9 +505,10 @@ def view_node(project_url, node_id):
                               node=node,
                               project=project,
                               navigation_links=navigation_links,
-                               extension_sidebar_links=extension_sidebar_links,
                               og_picture=og_picture,)

+    extension_sidebar_links = current_app.extension_sidebar_links(project)
+
    return render_template('projects/view{}.html'.format(theatre),
                           api=api,
                           project=project,
@@ -517,7 +517,7 @@ def view_node(project_url, node_id):
                           show_project=False,
                           og_picture=og_picture,
                           navigation_links=navigation_links,
-                           extension_sidebar_links=extension_sidebar_links,)
+                           extension_sidebar_links=extension_sidebar_links)
def find_project_or_404(project_url, embedded=None, api=None):
@@ -540,7 +540,8 @@ def search(project_url):
"""Search into a project""" """Search into a project"""
api = system_util.pillar_api() api = system_util.pillar_api()
project = find_project_or_404(project_url, api=api) project = find_project_or_404(project_url, api=api)
utils.attach_project_pictures(project, api) project.picture_square = utils.get_file(project.picture_square, api=api)
project.picture_header = utils.get_file(project.picture_header, api=api)
return render_template('nodes/search.html', return render_template('nodes/search.html',
project=project, project=project,
@@ -581,8 +582,6 @@ def edit(project_url):
            project.picture_square = form.picture_square.data
        if form.picture_header.data:
            project.picture_header = form.picture_header.data
-        if form.picture_16_9.data:
-            project.picture_16_9 = form.picture_16_9.data

        # Update world permissions from is_private checkbox
        if form.is_private.data:
@@ -598,8 +597,6 @@ def edit(project_url):
        form.picture_square.data = project.picture_square._id
    if project.picture_header:
        form.picture_header.data = project.picture_header._id
-    if project.picture_16_9:
-        form.picture_16_9.data = project.picture_16_9._id

    # List of fields from the form that should be hidden to regular users
    if current_user.has_role('admin'):

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,109 @@
(function(vjs) {
"use strict";
var
extend = function(obj) {
var arg, i, k;
for (i = 1; i < arguments.length; i++) {
arg = arguments[i];
for (k in arg) {
if (arg.hasOwnProperty(k)) {
obj[k] = arg[k];
}
}
}
return obj;
},
defaults = {
count: 10,
counter: "counter",
countdown: "countdown",
countdown_text: "Next video in:",
endcard: "player-endcard",
related: "related-content",
next: "next-video",
getRelatedContent: function(callback){callback();},
getNextVid: function(callback){callback();}
},
endcard = function(options) {
var player = this;
var el = this.el();
var settings = extend({}, defaults, options || {});
// set background
var card = document.createElement('div');
card.id = settings.endcard;
card.style.display = 'none';
el.appendChild(card);
settings.getRelatedContent(function(content) {
if (content instanceof Array) {
var related_content_div = document.createElement('div');
related_content_div.id = settings.related;
for (var i = 0; i < content.length; i++) {
related_content_div.appendChild(content[i]);
}
card.appendChild(related_content_div);
}
else {
throw new TypeError("options.getRelatedContent must return an array");
}
});
settings.getNextVid(function(next) {
if (typeof next !== "undefined") {
var next_div = document.createElement('div');
var counter = document.createElement('span');
var countdown = document.createElement('div');
counter.id = settings.counter;
countdown.id = settings.countdown;
next_div.id = settings.next;
countdown.innerHTML = settings.countdown_text;
countdown.appendChild(counter);
next_div.appendChild(countdown);
next_div.appendChild(next);
card.appendChild(next_div);
}
});
var counter_started = 0;
player.on('ended', function() {
card.style.display = 'block';
var next = document.getElementById(settings.next);
if (next !== null) {
// Re-fetch the counter element by id: the 'counter' variable above is local
// to the getNextVid callback and is not in scope in this handler.
var counter = document.getElementById(settings.counter);
var href = next.getElementsByTagName("a")[0].href;
var count = settings.count;
counter.innerHTML = count;
var interval = setInterval(function(){
count--;
if (count <= 0) {
clearInterval(interval);
window.location = href;
return;
}
counter.innerHTML = count;
}, 1000);
}
if (counter_started === 0) {
counter_started++;
player.on('playing', function() {
card.style.display = 'none';
clearInterval(interval);
});
}
});
};
vjs.plugin('endcard', endcard);
})(window.videojs);
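// Illustrative usage of the plugin registered above (the player id and both
// callbacks are assumptions for the example; option names match the defaults):
// videojs('my-player').endcard({
//     getRelatedContent: function(callback) { callback([]); },
//     getNextVid: function(callback) { callback(undefined); }
// });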

View File

@@ -31,10 +31,8 @@ def check_oauth_provider(provider):
@blueprint.route('/authorize/<provider>')
def oauth_authorize(provider):
-    if current_user.is_authenticated:
-        next_after_login = session.pop('next_after_login', None) or url_for('main.homepage')
-        log.debug('Redirecting user to %s', next_after_login)
-        return redirect(next_after_login)
+    if not current_user.is_anonymous:
+        return redirect(url_for('main.homepage'))

    try:
        oauth = OAuthSignIn.get_provider(provider)
@@ -54,10 +52,8 @@ def oauth_callback(provider):
    from pillar.api.utils.authentication import store_token
    from pillar.api.utils import utcnow

-    next_after_login = session.pop('next_after_login', None) or url_for('main.homepage')
    if current_user.is_authenticated:
-        log.debug('Redirecting user to %s', next_after_login)
-        return redirect(next_after_login)
+        return redirect(url_for('main.homepage'))

    oauth = OAuthSignIn.get_provider(provider)
    try:
@@ -67,14 +63,11 @@ def oauth_callback(provider):
        raise wz_exceptions.Forbidden()
    if oauth_user.id is None:
        log.debug('Authentication failed for user with {}'.format(provider))
-        return redirect(next_after_login)
+        return redirect(url_for('main.homepage'))

    # Find or create user
    user_info = {'id': oauth_user.id, 'email': oauth_user.email, 'full_name': ''}
    db_user = find_user_in_db(user_info, provider=provider)
-    if '_deleted' in db_user and db_user['_deleted'] is True:
-        log.debug('User has been deleted and will not be logged in')
-        return redirect(next_after_login)
    db_id, status = upsert_user(db_user)

    # TODO(Sybren): If the user doesn't have any badges, but the access token
@@ -95,8 +88,11 @@ def oauth_callback(provider):
    # Check with Blender ID to update certain user roles.
    update_subscription()

-    log.debug('Redirecting user to %s', next_after_login)
-    return redirect(next_after_login)
+    next_after_login = session.pop('next_after_login', None)
+    if next_after_login:
+        log.debug('Redirecting user to %s', next_after_login)
+        return redirect(next_after_login)
+
+    return redirect(url_for('main.homepage'))

@blueprint.route('/login')

View File

@@ -45,7 +45,6 @@ def attach_project_pictures(project, api):
    project.picture_square = get_file(project.picture_square, api=api)
    project.picture_header = get_file(project.picture_header, api=api)
-    project.picture_16_9 = get_file(project.picture_16_9, api=api)

def mass_attach_project_pictures(projects: typing.Iterable[pillarsdk.Project], *,

View File

@@ -1,7 +1,7 @@
# Primary requirements
-r ../pillar-python-sdk/requirements.txt

-attrs==18.2.0
+attrs==16.2.0
algoliasearch==1.12.0
bcrypt==3.1.3
blinker==1.4
@@ -14,7 +14,6 @@ Eve==0.8
Flask==1.0.2
Flask-Babel==0.11.2
Flask-Caching==1.4.0
-Flask-DebugToolbar==0.10.1
Flask-Script==2.0.6
Flask-Login==0.4.1
Flask-WTF==0.14.2
@@ -42,9 +41,9 @@ asn1crypto==0.24.0
Babel==2.6.0
billiard==3.5.0.4
Cerberus==1.2
-cffi==1.12.2
+cffi==1.10.0
click==6.7
-cryptography==2.6.1
+cryptography==2.0.3
Events==0.3
future==0.16.0
googleapis-common-protos==1.5.3
@@ -52,7 +51,7 @@ html5lib==1.0.1
idna==2.5
ipaddress==1.0.22
itsdangerous==0.24
-Jinja2==2.10.1
+Jinja2==2.10
kombu==4.2.1
oauth2client==4.1.2
oauthlib==2.1.0
@@ -61,14 +60,14 @@ protobuf==3.6.0
protorpc==0.12.0
pyasn1==0.4.4
pyasn1-modules==0.2.2
-pycparser==2.19
+pycparser==2.17
pymongo==3.7.0
pyOpenSSL==16.2.0
pytz==2018.5
requests-oauthlib==1.0.0
rsa==3.4.2
simplejson==3.16.0
-six==1.12.0
+six==1.10.0
urllib3==1.22
vine==1.1.4
webencodings==0.5.1

View File

@@ -11,8 +11,10 @@ $(document).ready(function() {
    var what = '';

    // Templates binding
+    var hitTemplate = Hogan.compile($('#hit-template').text());
    var statsTemplate = Hogan.compile($('#stats-template').text());
    var facetTemplate = Hogan.compile($('#facet-template').text());
+    var sliderTemplate = Hogan.compile($('#slider-template').text());
    var paginationTemplate = Hogan.compile($('#pagination-template').text());

    // defined in tutti/4_search.js
@@ -45,7 +47,6 @@ $(document).ready(function() {
        renderFacets(content);
        renderPagination(content);
        renderFirstHit($(hits).children('.search-hit:first'));
-        updateUrlParams();
    });

    /***************
@@ -65,7 +66,7 @@ $(document).ready(function() {
        window.setTimeout(function() {
            // Ignore getting that first result when there is none.
-            var hit_id = firstHit.attr('data-node-id');
+            var hit_id = firstHit.attr('data-hit-id');
            if (hit_id === undefined) {
                done();
                return;
@@ -86,6 +87,12 @@ $(document).ready(function() {
    // Initial search
    initWithUrlParams();

+    function convertTimestamp(iso8601) {
+        var d = new Date(iso8601)
+        return d.toLocaleDateString();
+    }
+
    function renderStats(content) {
        var stats = {
            nbHits: numberWithDelimiter(content.count),
@@ -96,17 +103,20 @@ $(document).ready(function() {
    }

    function renderHits(content) {
-        $hits.empty();
-        if (content.hits.length === 0) {
-            $hits.html('<p id="no-hits">We didn\'t find any items. Try searching something else.</p>');
-        }
-        else {
-            listof$hits = content.hits.map(function(hit){
-                return pillar.templates.Component.create$listItem(hit)
-                    .addClass('js-search-hit cursor-pointer search-hit');
-            })
-            $hits.append(listof$hits);
-        }
+        var hitsHtml = '';
+        for (var i = 0; i < content.hits.length; ++i) {
+            var created = content.hits[i].created_at;
+            if (created) {
+                content.hits[i].created_at = convertTimestamp(created);
+            }
+            var updated = content.hits[i].updated_at;
+            if (updated) {
+                content.hits[i].updated_at = convertTimestamp(updated);
+            }
+            hitsHtml += hitTemplate.render(content.hits[i]);
+        }
+        if (content.hits.length === 0) hitsHtml = '<p id="no-hits">We didn\'t find any items. Try searching something else.</p>';
+        $hits.html(hitsHtml);
    }

    function renderFacets(content) {
@@ -123,7 +133,7 @@ $(document).ready(function() {
            var refined = search.isRefined(label, item.key);
            values.push({
                facet: label,
-                label: item.key_as_string || item.key,
+                label: item.key,
                value: item.key,
                count: item.doc_count,
                refined: refined,
@@ -143,7 +153,7 @@ $(document).ready(function() {
            buckets.forEach(storeValue(values, label));

            facets.push({
-                title: removeUnderscore(label),
+                title: label,
                values: values.slice(0),
            });
        }
@@ -208,9 +218,6 @@ $(document).ready(function() {
        $pagination.html(paginationTemplate.render(pagination));
    }

-    function removeUnderscore(s) {
-        return s.replace(/_/g, ' ')
-    }

    // Event bindings
    // Click binding
@@ -293,46 +300,37 @@ $(document).ready(function() {
    };

    function initWithUrlParams() {
-        var pageURL = decodeURIComponent(window.location.search.substring(1)),
-            urlVariables = pageURL.split('&'),
-            query,
-            i;
-        for (i = 0; i < urlVariables.length; i++) {
-            var parameterPair = urlVariables[i].split('='),
-                key = parameterPair[0],
-                sValue = parameterPair[1];
-            if (!key) continue;
-            if (key === 'q') {
-                query = sValue;
-                continue;
-            }
-            if (key === 'page') {
-                var page = Number.parseInt(sValue)
-                search.setCurrentPage(isNaN(page) ? 0 : page)
-                continue;
-            }
-            if (key === 'project') {
-                continue; // We take the project from the path
-            }
-            if (sValue !== undefined) {
-                var iValue = Number.parseInt(sValue),
-                    value = isNaN(iValue) ? sValue : iValue;
-                search.toggleTerm(key, value);
-                continue;
-            }
-            console.log('Unhandled url parameter pair:', parameterPair)
-        }
+        var sPageURL = location.hash;
+        if (!sPageURL || sPageURL.length === 0) {
+            return true;
+        }
+        var sURLVariables = sPageURL.split('&');
+        if (!sURLVariables || sURLVariables.length === 0) {
+            return true;
+        }
+        var query = decodeURIComponent(sURLVariables[0].split('=')[1]);
        $inputField.val(query);
-        do_search(query || '');
+        search.setQuery(query, what);
+
+        for (var i = 2; i < sURLVariables.length; i++) {
+            var sParameterName = sURLVariables[i].split('=');
+            var facet = decodeURIComponent(sParameterName[0]);
+            var value = decodeURIComponent(sParameterName[1]);
+        }
+        // Page has to be set in the end to avoid being overwritten
+        var page = decodeURIComponent(sURLVariables[1].split('=')[1]) - 1;
+        search.setCurrentPage(page);
    }

-    function updateUrlParams() {
-        var prevState = history.state,
-            prevTitle = document.title,
-            params = search.getParams(),
-            newUrl = window.location.pathname + '?';
-        delete params['project'] // We take the project from the path
-        newUrl += jQuery.param(params)
-        history.replaceState(prevState, prevTitle, newUrl);
+    function setURLParams(state) {
+        var urlParams = '?';
+        var currentQuery = state.query;
+        urlParams += 'q=' + encodeURIComponent(currentQuery);
+        var currentPage = state.page + 1;
+        urlParams += '&page=' + currentPage;
+        location.replace(urlParams);
    }
-
-    // do empty search to fill aggregations
-    do_search('');
});

View File

@@ -1,2 +0,0 @@
Gulp will transpile everything in this folder. Every sub folder containing an init.js file that exports functions/classes
will be packed into a module in tutti.js under the namespace pillar.FOLDER_NAME.
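For example (illustrative, mirroring the api module in this commit): a folder api/ with an init.js containing
`export { thenGetProject } from './projects'` becomes callable as `pillar.api.thenGetProject()` once tutti.js is loaded.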

View File

@@ -1,46 +0,0 @@
function thenGetComments(parentId) {
return $.getJSON(`/api/nodes/${parentId}/comments`);
}
function thenCreateComment(parentId, msg, attachments) {
let data = JSON.stringify({
msg: msg,
attachments: attachments
});
return $.ajax({
url: `/api/nodes/${parentId}/comments`,
type: 'POST',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
});
}
function thenUpdateComment(parentId, commentId, msg, attachments) {
let data = JSON.stringify({
msg: msg,
attachments: attachments
});
return $.ajax({
url: `/api/nodes/${parentId}/comments/${commentId}`,
type: 'PATCH',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
});
}
function thenVoteComment(parentId, commentId, vote) {
let data = JSON.stringify({
vote: vote
});
return $.ajax({
url: `/api/nodes/${parentId}/comments/${commentId}/vote`,
type: 'POST',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
});
}
export { thenGetComments, thenCreateComment, thenUpdateComment, thenVoteComment }
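// Illustrative usage of the helpers above (hypothetical ids; the attachments
// payload shape is an assumption for the example):
// thenCreateComment(parentId, 'Looks great!', {}).then(function(comment) {
//     console.log('Created comment', comment._id);
// });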

View File

@@ -1,54 +0,0 @@
function thenUploadFile(projectId, file, progressCB=(total, loaded)=>{}) {
let formData = createFormData(file)
return $.ajax({
url: `/api/storage/stream/${projectId}`,
type: 'POST',
data: formData,
cache: false,
contentType: false,
processData: false,
xhr: () => {
let myxhr = $.ajaxSettings.xhr();
if (myxhr.upload) {
// For handling the progress of the upload
myxhr.upload.addEventListener('progress', function(e) {
if (e.lengthComputable) {
progressCB(e.total, e.loaded);
}
}, false);
}
return myxhr;
}
});
}
function createFormData(file) {
let formData = new FormData();
formData.append('file', file);
return formData;
}
function thenGetFileDocument(fileId) {
return $.get(`/api/files/${fileId}`);
}
function getFileVariation(fileDoc, size = 'm') {
var show_variation = null;
if (typeof fileDoc.variations != 'undefined') {
for (var variation of fileDoc.variations) {
if (variation.size != size) continue;
show_variation = variation;
break;
}
}
if (show_variation == null) {
throw 'Image not found: ' + fileDoc._id + ' size: ' + size;
}
return show_variation;
}
export { thenUploadFile, thenGetFileDocument, getFileVariation }
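// Illustrative usage: fetch a file document, then show its 'm'-size variation
// ('link' is the property the variations carry; the selector is hypothetical):
// thenGetFileDocument(fileId).then(function(fileDoc) {
//     let variation = getFileVariation(fileDoc, 'm');
//     $('img.js-preview').attr('src', variation.link);
// });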

View File

@@ -1,7 +0,0 @@
/**
* Functions for communicating with the pillar server api
*/
export { thenMarkdownToHtml } from './markdown'
export { thenGetProject } from './projects'
export { thenGetNodes, thenGetNode, thenGetNodeActivities, thenUpdateNode, thenDeleteNode } from './nodes'
export { thenGetProjectUsers } from './users'

View File

@@ -1,17 +0,0 @@
function thenMarkdownToHtml(markdown, attachments={}) {
let data = JSON.stringify({
content: markdown,
attachments: attachments
});
return $.ajax({
url: "/nodes/preview-markdown",
type: 'POST',
headers: {"X-CSRFToken": csrf_token},
headers: {},
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
})
}
export { thenMarkdownToHtml }

View File

@@ -1,82 +0,0 @@
function thenGetNodes(where, embedded={}, sort='') {
let encodedWhere = encodeURIComponent(JSON.stringify(where));
let encodedEmbedded = encodeURIComponent(JSON.stringify(embedded));
let encodedSort = encodeURIComponent(sort);
return $.ajax({
url: `/api/nodes?where=${encodedWhere}&embedded=${encodedEmbedded}&sort=${encodedSort}`,
cache: false,
});
}
function thenGetNode(nodeId) {
return $.ajax({
url: `/api/nodes/${nodeId}`,
cache: false,
});
}
function thenGetNodeActivities(nodeId, sort='[("_created", -1)]', max_results=20, page=1) {
let encodedSort = encodeURIComponent(sort);
return $.ajax({
url: `/api/nodes/${nodeId}/activities?sort=${encodedSort}&max_results=${max_results}&page=${page}`,
cache: false,
});
}
function thenUpdateNode(node) {
let id = node['_id'];
let etag = node['_etag'];
let nodeToSave = removePrivateKeys(node);
let data = JSON.stringify(nodeToSave);
return $.ajax({
url: `/api/nodes/${id}`,
type: 'PUT',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8',
headers: {'If-Match': etag},
}).then(updatedInfo => {
return thenGetNode(updatedInfo['_id'])
.then(node => {
pillar.events.Nodes.triggerUpdated(node);
return node;
})
});
}
function thenDeleteNode(node) {
let id = node['_id'];
let etag = node['_etag'];
return $.ajax({
url: `/api/nodes/${id}`,
type: 'DELETE',
headers: {'If-Match': etag},
}).then(() => {
pillar.events.Nodes.triggerDeleted(id);
});
}
function removePrivateKeys(doc) {
function doRemove(d) {
for (const key in d) {
if (key.startsWith('_')) {
delete d[key];
continue;
}
let val = d[key];
if(typeof val === 'object') {
doRemove(val);
}
}
}
let docCopy = JSON.parse(JSON.stringify(doc));
doRemove(docCopy);
delete docCopy['allowed_methods']
return docCopy;
}
export { thenGetNodes, thenGetNode, thenGetNodeActivities, thenUpdateNode, thenDeleteNode }
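// Illustrative usage: rename a node. thenUpdateNode() strips the private keys,
// PUTs the document, and notifies listeners via pillar.events.Nodes:
// thenGetNode(nodeId).then(function(node) {
//     node.name = 'New name';
//     return thenUpdateNode(node);
// });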

View File

@@ -1,5 +0,0 @@
function thenGetProject(projectId) {
return $.get(`/api/projects/${projectId}`);
}
export { thenGetProject }

View File

@@ -1,7 +0,0 @@
function thenGetProjectUsers(projectId) {
return $.ajax({
url: `/api/p/users?project_id=${projectId}`,
});
}
export { thenGetProjectUsers }

View File

@@ -1,167 +0,0 @@
/**
* Helper class to trigger/listen to global events on new/updated/deleted nodes.
*
* @example
* function myCallback(event) {
* console.log('Updated node:', event.detail);
* }
* // Register a callback:
* Nodes.onUpdated('5c1cc4a5a013573d9787164b', myCallback);
* // When changing the node, notify the listeners:
* Nodes.triggerUpdated(myUpdatedNode);
*/
class EventName {
static parentCreated(parentId, node_type) {
return `pillar:node:${parentId}:created-${node_type}`;
}
static globalCreated(node_type) {
return `pillar:node:created-${node_type}`;
}
static updated(nodeId) {
return `pillar:node:${nodeId}:updated`;
}
static deleted(nodeId) {
return `pillar:node:${nodeId}:deleted`;
}
static loaded() {
return `pillar:node:loaded`;
}
}
function trigger(eventName, data) {
document.dispatchEvent(new CustomEvent(eventName, {detail: data}));
}
function on(eventName, cb) {
document.addEventListener(eventName, cb);
}
function off(eventName, cb) {
document.removeEventListener(eventName, cb);
}
class Nodes {
/**
* Trigger events that node has been created
* @param {Object} node
*/
static triggerCreated(node) {
if (node.parent) {
trigger(
EventName.parentCreated(node.parent, node.node_type),
node);
}
trigger(
EventName.globalCreated(node.node_type),
node);
}
/**
* Get notified when a new node with parent === parentId and node_type === node_type is created
* @param {String} parentId
* @param {String} node_type
* @param {Function(Event)} cb
*/
static onParentCreated(parentId, node_type, cb){
on(
EventName.parentCreated(parentId, node_type),
cb);
}
static offParentCreated(parentId, node_type, cb){
off(
EventName.parentCreated(parentId, node_type),
cb);
}
/**
* Get notified when a new node with node_type === node_type is created
* @param {String} node_type
* @param {Function(Event)} cb
*/
static onCreated(node_type, cb){
on(
EventName.globalCreated(node_type),
cb);
}
static offCreated(node_type, cb){
off(
EventName.globalCreated(node_type),
cb);
}
static triggerUpdated(node) {
trigger(
EventName.updated(node._id),
node);
}
/**
* Get notified when node with _id === nodeId is updated
* @param {String} nodeId
* @param {Function(Event)} cb
*/
static onUpdated(nodeId, cb) {
on(
EventName.updated(nodeId),
cb);
}
static offUpdated(nodeId, cb) {
off(
EventName.updated(nodeId),
cb);
}
/**
* Notify that node has been deleted.
* @param {String} nodeId
*/
static triggerDeleted(nodeId) {
trigger(
EventName.deleted(nodeId),
nodeId);
}
/**
* Listen to events of nodes being deleted where _id === nodeId
* @param {String} nodeId
* @param {Function(Event)} cb
*/
static onDeleted(nodeId, cb) {
on(
EventName.deleted(nodeId),
cb);
}
static offDeleted(nodeId, cb) {
off(
EventName.deleted(nodeId),
cb);
}
static triggerLoaded(nodeId) {
trigger(EventName.loaded(), {nodeId: nodeId});
}
/**
* Listen to events of nodes being loaded for display
* @param {Function(Event)} cb
*/
static onLoaded(cb) {
on(EventName.loaded(), cb);
}
static offLoaded(cb) {
off(EventName.loaded(), cb);
}
}
export { Nodes }
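// Illustrative usage beyond the class docstring above: reacting to new
// comments under a given parent node (hypothetical ids):
// Nodes.onParentCreated(parentNodeId, 'comment', function(event) {
//     console.log('New comment:', event.detail._id);
// });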

View File

@@ -1,4 +0,0 @@
/**
* Collecting Custom Pillar events here
*/
export {Nodes} from './Nodes'

View File

@@ -1,64 +0,0 @@
import {SearchParams} from './SearchParams';
export class MultiSearch {
constructor(kwargs) {
this.uiUrl = kwargs['uiUrl']; // Url for advanced search
this.apiUrl = kwargs['apiUrl']; // Url for api calls
this.searchParams = MultiSearch.createMultiSearchParams(kwargs['searchParams']);
this.q = '';
}
setSearchWord(q) {
this.q = q;
this.searchParams.forEach((qsParam) => {
qsParam.setSearchWord(q);
});
}
getSearchUrl() {
return this.uiUrl + '?q=' + this.q;
}
getAllParams() {
let retval = $.map(this.searchParams, (msParams) => {
return msParams.params;
});
return retval;
}
parseResult(rawResult) {
return $.map(rawResult, (subResult, index) => {
let name = this.searchParams[index].name;
let pStr = this.searchParams[index].getParamStr();
let result = $.map(subResult.hits.hits, (hit) => {
return hit._source;
});
return {
name: name,
url: this.uiUrl + '?' + pStr,
result: result,
hasResults: !!result.length
};
});
}
thenExecute() {
let data = JSON.stringify(this.getAllParams());
let rawAjax = $.ajax({
url: this.apiUrl,
type: 'POST',
data: data,
dataType: 'json',
contentType: 'application/json; charset=UTF-8'
});
let prettyPromise = rawAjax.then(this.parseResult.bind(this));
prettyPromise['abort'] = rawAjax.abort.bind(rawAjax); // Hack to be able to abort the promise down the road
return prettyPromise;
}
static createMultiSearchParams(argsList) {
return $.map(argsList, (args) => {
return new SearchParams(args);
});
}
}

View File

@@ -1,204 +0,0 @@
import { create$noHits, create$results, create$input } from './templates'
import {SearchFacade} from './SearchFacade';
/**
* QuickSearch : Interacts with the dom document
* 1-SearchFacade : Controls which multisearch is active
* *-MultiSearch : One multi search is typically Project or Cloud
* *-SearchParams : The search params for the individual searches
*/
export class QuickSearch {
/**
* Interacts with the DOM and delegates the input down to the SearchFacade
* @param {selector string} searchToggle The quick-search toggle
* @param {*} kwargs
*/
constructor(searchToggle, kwargs) {
this.$body = $('body');
this.$quickSearch = $('.quick-search');
this.$inputComponent = $(kwargs['inputTarget']);
this.$inputComponent.empty();
this.$inputComponent.append(create$input(kwargs['searches']));
this.$searchInput = this.$inputComponent.find('input');
this.$searchSelect = this.$inputComponent.find('select');
this.$resultTarget = $(kwargs['resultTarget']);
this.$searchSymbol = this.$inputComponent.find('.qs-busy-symbol');
this.searchFacade = new SearchFacade(kwargs['searches'] || {});
this.$searchToggle = $(searchToggle);
this.isBusy = false;
this.attach();
}
attach() {
if (this.$searchSelect.length) {
this.$searchSelect
.change(this.execute.bind(this))
.change(() => this.$searchInput.focus());
this.$searchInput.addClass('multi-scope');
}
this.$searchInput
.keyup(this.onInputKeyUp.bind(this));
this.$inputComponent
.on('pillar:workStart', () => {
this.$searchSymbol.addClass('spinner')
this.$searchSymbol.toggleClass('pi-spin pi-cancel')
})
.on('pillar:workStop', () => {
this.$searchSymbol.removeClass('spinner')
this.$searchSymbol.toggleClass('pi-spin pi-cancel')
});
this.searchFacade.setOnResultCB(this.renderResult.bind(this));
this.searchFacade.setOnFailureCB(this.onSearchFailed.bind(this));
this.$searchToggle
.one('click', this.execute.bind(this)); // Initial search executed once
this.registerShowGui();
this.registerHideGui();
}
registerShowGui() {
this.$searchToggle
.click((e) => {
this.showGUI();
e.stopPropagation();
});
}
registerHideGui() {
this.$searchSymbol
.click(() => {
this.hideGUI();
});
this.$body.click((e) => {
let $target = $(e.target);
let isClickInResult = $target.hasClass('.qs-result') || !!$target.parents('.qs-result').length;
let isClickInInput = $target.hasClass('.qs-input') || !!$target.parents('.qs-input').length;
if (!isClickInResult && !isClickInInput) {
this.hideGUI();
}
});
$(document).keyup((e) => {
if (e.key === 'Escape') {
this.hideGUI();
}
});
}
showGUI() {
this.$body.addClass('has-overlay');
this.$quickSearch.trigger('pillar:searchShow');
this.$quickSearch.addClass('show');
if (!this.$searchInput.is(':focus')) {
this.$searchInput.focus();
}
}
hideGUI() {
this.$body.removeClass('has-overlay');
this.$searchToggle.addClass('pi-search');
this.$searchInput.blur();
this.$quickSearch.removeClass('show');
this.$quickSearch.trigger('pillar:searchHidden');
}
onInputKeyUp(e) {
let newQ = this.$searchInput.val();
let currQ = this.searchFacade.getSearchWord();
this.searchFacade.setSearchWord(newQ);
let searchUrl = this.searchFacade.getSearchUrl();
if (e.key === 'Enter') {
window.location.href = searchUrl;
return;
}
if (newQ !== currQ) {
this.execute();
}
}
execute() {
this.busy(true);
let scope = this.getScope();
this.searchFacade.setCurrentScope(scope);
let q = this.$searchInput.val();
this.searchFacade.setSearchWord(q);
this.searchFacade.execute();
}
renderResult(results) {
this.$resultTarget.empty();
this.$resultTarget.append(this.create$result(results));
this.busy(false);
}
create$result(results) {
let withHits = results.reduce((aggr, subResult) => {
if (subResult.hasResults) {
aggr.push(subResult);
}
return aggr;
}, []);
if (!withHits.length) {
return create$noHits(this.searchFacade.getSearchUrl());
}
return create$results(results, this.searchFacade.getSearchUrl());
}
onSearchFailed(err) {
toastr.error(xhrErrorResponseMessage(err), 'Unable to perform search:');
this.busy(false);
this.$inputComponent.trigger('pillar:failed', err);
}
getScope() {
return !!this.$searchSelect.length ? this.$searchSelect.val() : 'cloud';
}
busy(val) {
if (val !== this.isBusy) {
var eventType = val ? 'pillar:workStart' : 'pillar:workStop';
this.$inputComponent.trigger(eventType);
}
this.isBusy = val;
}
}
$.fn.extend({
/**
* $('#qs-toggle').quickSearch({
* resultTarget: '#search-overlay',
* inputTarget: '#qs-input',
* searches: {
* project: {
* name: 'Project',
* uiUrl: '{{ url_for("projects.search", project_url=project.url)}}',
* apiUrl: '/api/newsearch/multisearch',
* searchParams: [
* {name: 'Assets', params: {project: '{{ project._id }}', node_type: 'asset'}},
* {name: 'Blog', params: {project: '{{ project._id }}', node_type: 'post'}},
* {name: 'Groups', params: {project: '{{ project._id }}', node_type: 'group'}},
* ]
* },
* cloud: {
* name: 'Cloud',
* uiUrl: '/search',
* apiUrl: '/api/newsearch/multisearch',
* searchParams: [
* {name: 'Assets', params: {node_type: 'asset'}},
* {name: 'Blog', params: {node_type: 'post'}},
* {name: 'Groups', params: {node_type: 'group'}},
* ]
* },
* },
* });
* @param {*} kwargs
*/
quickSearch: function (kwargs) {
$(this).each((i, qsElem) => {
new QuickSearch(qsElem, kwargs);
});
}
})

View File

@@ -1,68 +0,0 @@
import {MultiSearch} from './MultiSearch';
export class SearchFacade {
/**
* One SearchFacade holds n MultiSearch objects, and delegates search requests to the active MultiSearch
* @param {*} kwargs
*/
constructor(kwargs) {
this.searches = SearchFacade.createMultiSearches(kwargs);
this.currentScope = 'cloud'; // which multisearch to use
this.currRequest;
this.resultCB;
this.failureCB;
this.q = '';
}
setSearchWord(q) {
this.q = q;
$.each(this.searches, (k, mSearch) => {
mSearch.setSearchWord(q);
});
}
getSearchWord() {
return this.q;
}
getSearchUrl() {
return this.searches[this.currentScope].getSearchUrl();
}
setCurrentScope(scope) {
this.currentScope = scope;
}
execute() {
if (this.currRequest) {
this.currRequest.abort();
}
this.currRequest = this.searches[this.currentScope].thenExecute();
this.currRequest
.then((results) => {
this.resultCB(results);
})
.fail((err, reason) => {
if (reason == 'abort') {
return;
}
this.failureCB(err);
});
}
setOnResultCB(cb) {
this.resultCB = cb;
}
setOnFailureCB(cb) {
this.failureCB = cb;
}
static createMultiSearches(kwargs) {
var searches = {};
$.each(kwargs, (key, value) => {
searches[key] = new MultiSearch(value);
});
return searches;
}
}

View File

@@ -1,14 +0,0 @@
export class SearchParams {
constructor(kwargs) {
this.name = kwargs['name'] || '';
this.params = kwargs['params'] || {};
}
setSearchWord(q) {
this.params['q'] = q || '';
}
getParamStr() {
return jQuery.param(this.params);
}
}
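// Illustrative usage (assumed values):
// let params = new SearchParams({name: 'Assets', params: {node_type: 'asset'}});
// params.setSearchWord('tree');
// params.getParamStr();  // "node_type=asset&q=tree"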

View File

@@ -1 +0,0 @@
export { QuickSearch } from './QuickSearch';

View File

@@ -1,93 +0,0 @@
/**
* Creates the jQuery object that is rendered when nothing is found
* @param {String} advancedUrl Url to the advanced search with the current query
* @returns {$element} The jQuery element that is rendered when there are no hits
*/
function create$noHits(advancedUrl) {
return $('<div>')
.addClass('qs-msg text-center p-3')
.append(
$('<div>')
.addClass('h1 pi-displeased'),
$('<div>')
.addClass('h2')
.append(
$('<a>')
.attr('href', advancedUrl)
.text('Advanced search')
)
)
}
/**
* Creates the jQuery object that is rendered as the search input
* @param {Dict} searches The searches dict that is passed in on construction of the Quick-Search
* @returns {$element} The jQuery object that renders the search input components.
*/
function create$input(searches) {
let input = $('<input>')
.addClass('qs-input')
.attr('type', 'search')
.attr('autocomplete', 'off')
.attr('spellcheck', 'false')
.attr('autocorrect', 'false')
.attr('placeholder', 'Search...');
let workingSymbol = $('<i>')
.addClass('pi-cancel qs-busy-symbol');
let inputComponent = [input, workingSymbol];
if (Object.keys(searches).length > 1) {
let i = 0;
let select = $('<select>')
.append(
$.map(searches, (it, value) => {
let option = $('<option>')
.attr('value', value)
.text(it['name']);
if (i === 0) {
option.attr('selected', 'selected');
}
i += 1;
return option;
})
);
inputComponent.push(select);
}
return inputComponent;
}
/**
* Creates the search result
* @param {List} results
* @param {String} advancedUrl
* @returns {$element} The jQuery object that is rendered as the result
*/
function create$results(results, advancedUrl) {
let $results = results.reduce((agg, res)=> {
if(res['result'].length) {
agg.push(
$('<a>')
.addClass('h4 mt-4 d-flex')
.attr('href', res['url'])
.text(res['name'])
)
agg.push(
$('<div>')
.addClass('card-deck card-deck-responsive card-padless js-asset-list p-3')
.append(
...pillar.templates.Nodes.createListOf$nodeItems(res['result'], 10, 0)
)
)
}
return agg;
}, [])
$results.push(
$('<a>')
.attr('href', advancedUrl)
.text('Advanced search...')
)
return $('<div>')
.addClass('m-auto qs-result')
.append(...$results)
}
export { create$noHits, create$results, create$input }

View File

@@ -1,2 +0,0 @@
This module is used to render nodes/users dynamically. It was written before we introduced vue.js into the project.
Current best practice is to use vue for this type of work.

View File

@@ -1,124 +0,0 @@
import { Assets } from '../nodes/Assets'
jest.useFakeTimers();
describe('Assets', () => {
describe('create$listItem', () => {
let nodeDoc;
let spyGet;
beforeEach(()=>{
// mock now to get a stable pretty printed created
Date.now = jest.fn(() => new Date(Date.UTC(2018,
10, //November! zero based month!
28, 11, 46, 30)).valueOf()); // A Tuesday
nodeDoc = {
_id: 'my-asset-id',
name: 'My Asset',
node_type: 'asset',
_created: "Wed, 07 Nov 2018 16:35:09 GMT",
project: {
name: 'My Project',
url: 'url-to-project'
},
properties: {
content_type: 'image'
}
};
spyGet = spyOn($, 'get').and.callFake(function(url) {
let ajaxMock = $.Deferred();
let response = {
variations: [{
size: 'l',
link: 'wrong-img-link',
width: 150,
height: 170,
},{
size: 'm',
link: 'img-link',
width: 50,
height: 70,
},{
size: 's',
link: 'wrong-img-link',
width: 5,
height: 7,
}]
}
ajaxMock.resolve(response);
return ajaxMock.promise();
});
});
describe('image content', () => {
test('node with picture', done => {
nodeDoc.picture = 'picture_id';
let $card = Assets.create$listItem(nodeDoc);
jest.runAllTimers();
expect($card.length).toEqual(1);
expect($card.prop('tagName')).toEqual('A'); // <a>
expect($card.hasClass('asset')).toBeTruthy();
expect($card.hasClass('card')).toBeTruthy();
expect($card.attr('href')).toEqual('/nodes/my-asset-id/redir');
expect($card.attr('title')).toEqual('My Asset');
let $body = $card.find('.card-body');
expect($body.length).toEqual(1);
let $title = $body.find('.card-title');
expect($title.length).toEqual(1);
expect(spyGet).toHaveBeenCalledTimes(1);
expect(spyGet).toHaveBeenLastCalledWith('/api/files/picture_id');
let $image = $card.find('img');
expect($image.length).toEqual(1);
let $imageSubstitute = $card.find('.pi-asset');
expect($imageSubstitute.length).toEqual(0);
let $progress = $card.find('.progress');
expect($progress.length).toEqual(0);
let $watched = $card.find('.card-label');
expect($watched.length).toEqual(0);
expect($card.find(':contains(3 weeks ago)').length).toBeTruthy();
done();
});
test('node without picture', done => {
let $card = Assets.create$listItem(nodeDoc);
expect($card.length).toEqual(1);
expect($card.prop('tagName')).toEqual('A'); // <a>
expect($card.hasClass('asset')).toBeTruthy();
expect($card.hasClass('card')).toBeTruthy();
expect($card.attr('href')).toEqual('/nodes/my-asset-id/redir');
expect($card.attr('title')).toEqual('My Asset');
let $body = $card.find('.card-body');
expect($body.length).toEqual(1);
let $title = $body.find('.card-title');
expect($title.length).toEqual(1);
expect(spyGet).toHaveBeenCalledTimes(0);
let $image = $card.find('img');
expect($image.length).toEqual(0);
let $imageSubstitute = $card.find('.pi-asset');
expect($imageSubstitute.length).toEqual(1);
let $progress = $card.find('.progress');
expect($progress.length).toEqual(0);
let $watched = $card.find('.card-label');
expect($watched.length).toEqual(0);
expect($card.find(':contains(3 weeks ago)').length).toBeTruthy();
done();
});
});
})
});

View File

@@ -1,48 +0,0 @@
import { Assets } from '../nodes/Assets'
import { Users } from '../users/Users'
import { Component } from '../init' // Component is initialized in init
describe('Component', () => {
test('can create Users listItem', () => {
let userDoc = {
_id: 'my-user-id',
username: 'My User Name',
full_name: 'My full name',
roles: ['admin', 'subscriber']
};
let $user_actual = Component.create$listItem(userDoc);
expect($user_actual.length).toBe(1);
let $user_reference = Users.create$listItem(userDoc);
expect($user_actual).toEqual($user_reference);
});
test('can create Asset listItem', () => {
let nodeDoc = {
_id: 'my-asset-id',
name: 'My Asset',
node_type: 'asset',
project: {
name: 'My Project',
url: 'url-to-project'
},
properties: {
content_type: 'image'
}
};
let $asset_actual = Component.create$listItem(nodeDoc);
expect($asset_actual.length).toBe(1);
let $asset_reference = Assets.create$listItem(nodeDoc);
expect($asset_actual).toEqual($asset_reference);
});
test('fail to create unknown', () => {
expect(()=>Component.create$listItem({})).toThrow('Can not create component using: {}')
expect(()=>Component.create$listItem()).toThrow('Can not create component using: undefined')
expect(()=>Component.create$listItem({strange: 'value'}))
.toThrow('Can not create component using: {"strange":"value"}')
});
});

View File

@@ -1,59 +0,0 @@
import { ComponentCreatorInterface } from './ComponentCreatorInterface'
const REGISTERED_CREATORS = []
/**
* Create a jQuery renderable element from a mongo document using registered creators.
* @deprecated use vue instead
*/
export class Component extends ComponentCreatorInterface {
/**
*
* @param {Object} doc
* @returns {$element}
*/
static create$listItem(doc) {
let creator = Component.getCreator(doc);
return creator.create$listItem(doc);
}
/**
* @param {Object} doc
* @returns {$element}
*/
static create$item(doc) {
let creator = Component.getCreator(doc);
return creator.create$item(doc);
}
/**
* @param {Object} candidate
* @returns {Boolean}
*/
static canCreate(candidate) {
if (!candidate) return false;
// getCreator() throws for unknown docs, so probe the registry directly.
return REGISTERED_CREATORS.some((creator) => creator.canCreate(candidate));
}
/**
* Register component creator to handle a node type
* @param {ComponentCreatorInterface} creator
*/
static registerCreator(creator) {
REGISTERED_CREATORS.push(creator);
}
/**
* @param {Object} doc
* @returns {ComponentCreatorInterface}
*/
static getCreator(doc) {
if (doc) {
for (let candidate of REGISTERED_CREATORS) {
if (candidate.canCreate(doc)) {
return candidate;
}
}
}
throw 'Can not create component using: ' + JSON.stringify(doc);
}
}
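// Usage sketch (editor's illustration; the document and container are made
// up): once creators are registered (see init.js below), a consumer can
// render a document without knowing its concrete type.
let exampleDoc = {
    _id: 'example-id',
    name: 'Example Asset',
    node_type: 'asset',
    project: {name: 'Example Project', url: 'example-project'},
};
if (Component.canCreate(exampleDoc)) {
    $('.js-listing').append(Component.create$listItem(exampleDoc));
}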

View File

@@ -1,30 +0,0 @@
/**
* @deprecated use vue instead
*/
export class ComponentCreatorInterface {
/**
* Create a $element to render document in a list
* @param {Object} doc
* @returns {$element}
*/
static create$listItem(doc) {
throw 'Not Implemented';
}
/**
* Create a $element to render the full doc
* @param {Object} doc
* @returns {$element}
*/
static create$item(doc) {
throw 'Not Implemented';
}
/**
* @param {Object} candidate
* @returns {boolean}
*/
static canCreate(candidate) {
throw 'Not Implemented';
}
}
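// Illustrative subclass (editor's sketch; `GreetingCard` is hypothetical):
// implementors override only the static members they support, as the Nodes
// and Users classes below do.
class GreetingCard extends ComponentCreatorInterface {
    static create$listItem(doc) {
        return $('<div class="card">').text('Hello ' + doc.name);
    }
    static canCreate(candidate) {
        return !!candidate.name;
    }
}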

View File

@@ -1,18 +0,0 @@
import { Nodes } from './nodes/Nodes';
import { Assets } from './nodes/Assets';
import { Posts } from './nodes/Posts';
import { Users } from './users/Users';
import { Component } from './component/Component';
Nodes.registerTemplate('asset', Assets);
Nodes.registerTemplate('post', Posts);
Component.registerCreator(Nodes);
Component.registerCreator(Users);
export {
Nodes,
Users,
Component
};

View File

@@ -1,49 +0,0 @@
import { NodesBase } from "./NodesBase";
import { thenLoadVideoProgress } from '../utils';
/**
* Create $element from a node of type asset
* @deprecated use vue instead
*/
export class Assets extends NodesBase{
static create$listItem(node) {
let markIfPublic = true;
let $card = super.create$listItem(node);
$card.addClass('asset');
if (node.properties && node.properties.duration){
let $thumbnailContainer = $card.find('.js-thumbnail-container')
let $cardDuration = $('<div class="card-label right">' + node.properties.duration + '</div>');
$thumbnailContainer.append($cardDuration);
/* Video progress and 'watched' label. */
$(window).trigger('pillar:workStart');
thenLoadVideoProgress(node._id)
.fail(console.log)
.then((view_progress)=>{
if (!view_progress) return
let $cardProgress = $('<div class="progress rounded-0">');
let $cardProgressBar = $('<div class="progress-bar">');
$cardProgressBar.css('width', view_progress.progress_in_percent + '%');
$cardProgress.append($cardProgressBar);
$thumbnailContainer.append($cardProgress);
if (view_progress.done){
let card_progress_done = $('<div class="card-label">WATCHED</div>');
$thumbnailContainer.append(card_progress_done);
}
})
.always(function() {
$(window).trigger('pillar:workStop');
});
}
/* 'Free' ribbon for public assets. */
if (markIfPublic && node.permissions && node.permissions.world){
$card.addClass('free');
}
return $card;
}
}

View File

@@ -1,67 +0,0 @@
import { NodesBase } from './NodesBase';
import { ComponentCreatorInterface } from '../component/ComponentCreatorInterface'
let CREATE_NODE_ITEM_MAP = {}
/**
* Create $element from node object
* @deprecated use vue instead
*/
export class Nodes extends ComponentCreatorInterface {
/**
* Creates a small list item out of a node document
* @param {NodeDoc} node mongodb or elastic node document
*/
static create$listItem(node) {
let factory = CREATE_NODE_ITEM_MAP[node.node_type] || NodesBase;
return factory.create$listItem(node);
}
/**
* Creates a full view out of a node document
* @param {NodeDoc} node mongodb or elastic node document
*/
static create$item(node) {
let factory = CREATE_NODE_ITEM_MAP[node.node_type] || NodesBase;
return factory.create$item(node);
}
/**
* Creates a list of items and a 'Load More' button
* @param {List} nodes A list of nodes to be created
* @param {Int} initial Number of nodes to show initially
* @param {Int} loadNext Number of nodes to show when clicking 'Load More'. If 0, no load more button will be shown
*/
static createListOf$nodeItems(nodes, initial=8, loadNext=8) {
let nodesLeftToRender = nodes.slice();
let nodesToCreate = nodesLeftToRender.splice(0, initial);
let listOf$items = nodesToCreate.map(Nodes.create$listItem);
if (loadNext > 0 && nodesLeftToRender.length) {
let $link = $('<a>')
.addClass('btn btn-outline-primary px-5 mb-auto btn-block js-load-next')
.attr('href', 'javascript:void(0);')
.click((e)=> {
let $target = $(e.target);
$target.replaceWith(Nodes.createListOf$nodeItems(nodesLeftToRender, loadNext, loadNext));
})
.text('Load More');
listOf$items.push($link);
}
return listOf$items;
}
static canCreate(candidate) {
return !!candidate.node_type;
}
/**
* Register template classes to handle the construction of different node types
* @param { String } node_type The node type whose template is registered
* @param { NodesBase } klass The class to handle the creation of jQuery objects
*/
static registerTemplate(node_type, klass) {
CREATE_NODE_ITEM_MAP[node_type] = klass;
}
}
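// Usage sketch (editor's illustration): show the first 8 node cards and a
// 'Load More' button that appends 8 more per click. `nodes` is assumed to
// be an array of node documents fetched elsewhere.
function appendNodeListing($container, nodes) {
    $container.append(Nodes.createListOf$nodeItems(nodes, 8, 8));
}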

View File

@@ -1,61 +0,0 @@
import { prettyDate } from '../../utils/prettydate';
import { ComponentCreatorInterface } from '../component/ComponentCreatorInterface'
/**
* @deprecated use vue instead
*/
export class NodesBase extends ComponentCreatorInterface {
static create$listItem(node) {
let nid = (node._id || node.objectID); // To support both mongo and elastic nodes
let $card = $('<a class="card node card-image-fade asset">')
.attr('data-node-id', nid)
.attr('href', '/nodes/' + nid + '/redir')
.attr('title', node.name);
let $thumbnailContainer = $('<div class="card-thumbnail js-thumbnail-container">');
function warnNoPicture() {
let $cardIcon = $('<div class="card-img-top card-icon">');
$cardIcon.html('<i class="pi-' + node.node_type + '">');
$thumbnailContainer.append($cardIcon);
}
if (!node.picture) {
warnNoPicture();
}
else {
$(window).trigger('pillar:workStart');
pillar.utils.thenLoadImage(node.picture)
.fail(warnNoPicture)
.then((imgVariation) => {
let img = $('<img class="card-img-top">')
.attr('alt', node.name)
.attr('src', imgVariation.link)
.attr('width', imgVariation.width)
.attr('height', imgVariation.height);
$thumbnailContainer.append(img);
})
.always(function () {
$(window).trigger('pillar:workStop');
});
}
$card.append($thumbnailContainer);
/* Card body for title and meta info. */
let $cardBody = $('<div class="card-body p-2 d-flex flex-column">');
let $cardTitle = $('<div class="card-title px-2 mb-2 font-weight-bold">');
$cardTitle.text(node.name);
$cardBody.append($cardTitle);
let $cardMeta = $('<ul class="card-text px-2 list-unstyled d-flex text-black-50 mt-auto">');
let $cardProject = $('<a class="font-weight-bold pr-2">')
.attr('href', '/p/' + node.project.url)
.attr('title', node.project.name)
.text(node.project.name);
$cardMeta.append($cardProject);
let created = node._created || node.created_at; // mongodb + elastic
$cardMeta.append('<li>' + prettyDate(created) + '</li>');
$cardBody.append($cardMeta);
$card.append($cardBody);
return $card;
}
static canCreate(candidate) {
return !!candidate.node_type;
}
}

View File

@@ -1,27 +0,0 @@
import { NodesBase } from "./NodesBase";
/**
* Create $element from a node of type post
* @deprecated use vue instead
*/
export class Posts extends NodesBase {
static create$item(post) {
let content = [];
let $title = $('<a>')
.attr('href', '/nodes/' + post._id + '/redir')
.attr('title', post.name)
.addClass('h1 text-uppercase font-weight-bold d-block pt-5 pb-2')
.text(post.name);
content.push($title);
let $post = $('<div>')
.addClass('expand-image-links imgs-fluid')
.append(
content,
$('<div>')
.addClass('node-details-description')
.html(post['properties']['pretty_content'])
);
return $post;
}
}

View File

@@ -1,28 +0,0 @@
import { ComponentCreatorInterface } from '../component/ComponentCreatorInterface'
/**
* Create $elements from user objects
* @deprecated use vue instead
*/
export class Users extends ComponentCreatorInterface {
static create$listItem(userDoc) {
let roles = userDoc.roles || [];
return $('<div>')
.addClass('users p-2 border-bottom')
.attr('data-user-id', userDoc._id || userDoc.objectID )
.append(
$('<h6>')
.addClass('mb-0 font-weight-bold')
.text(userDoc.full_name),
$('<small>')
.text(userDoc.username),
$('<small>')
.addClass('d-block roles text-info')
.text(roles.join(', '))
)
}
static canCreate(candidate) {
return !!candidate.username;
}
}

View File

@@ -1,46 +0,0 @@
import { Users } from '../Users'
describe('Users', () => {
let userDoc;
describe('create$listItem', () => {
beforeEach(()=>{
userDoc = {
_id: 'my-user-id',
username: 'My User Name',
full_name: 'My full name',
roles: ['admin', 'subscriber']
};
});
test('happy case', () => {
let $user = Users.create$listItem(userDoc);
expect($user.length).toBe(1);
expect($user.hasClass('users')).toBeTruthy();
expect($user.data('user-id')).toBe('my-user-id');
let $username = $user.find(':contains(My User Name)');
expect($username.length).toBe(1);
let $fullName = $user.find(':contains(My full name)');
expect($fullName.length).toBe(1);
let $roles = $user.find('.roles');
expect($roles.length).toBe(1);
expect($roles.text()).toBe('admin, subscriber')
});
})
describe('create$item', () => {
beforeEach(()=>{
userDoc = {
_id: 'my-user-id',
username: 'My User Name',
full_name: 'My full name',
roles: ['admin', 'subscriber']
};
});
test('Not Implemented', () => {
// Replace with proper test once implemented
expect(()=>Users.create$item(userDoc)).toThrow('Not Implemented');
});
})
});

View File

@@ -1,5 +0,0 @@
function thenLoadVideoProgress(nodeId) {
return $.get('/api/users/video/' + nodeId + '/progress')
}
export { thenLoadVideoProgress };
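// Usage sketch (editor's illustration; the node id is hypothetical): the
// endpoint resolves with a view progress document such as
// {progress_in_percent: 40, done: false}, as consumed by Assets.create$listItem.
thenLoadVideoProgress('some-node-id')
    .then((progress) => {
        if (progress) console.log(progress.progress_in_percent + '% watched');
    })
    .fail(console.log);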

View File

@@ -1,67 +0,0 @@
import { prettyDate } from '../init'
describe('prettydate', () => {
beforeEach(() => {
Date.now = jest.fn(() => new Date(Date.UTC(2016,
10, //November! zero based month!
8, 11, 46, 30)).valueOf()); // A Tuesday
});
test('bad input', () => {
expect(prettyDate(undefined)).toBeUndefined();
expect(prettyDate(null)).toBeUndefined();
expect(prettyDate('my birthday')).toBeUndefined();
});
test('past dates',() => {
expect(pd({seconds: -5})).toBe('just now');
expect(pd({minutes: -5})).toBe('5m ago')
expect(pd({days: -7})).toBe('last Tuesday')
expect(pd({days: -8})).toBe('1 week ago')
expect(pd({days: -14})).toBe('2 weeks ago')
expect(pd({days: -31})).toBe('8 Oct')
expect(pd({days: -(31 + 366)})).toBe('8 Oct 2015')
});
test('past dates with time',() => {
expect(pd({seconds: -5, detailed: true})).toBe('just now');
expect(pd({minutes: -5, detailed: true})).toBe('5m ago')
expect(pd({days: -7, detailed: true})).toBe('last Tuesday at 11:46')
expect(pd({days: -8, detailed: true})).toBe('1 week ago at 11:46')
// summer time below
expect(pd({days: -14, detailed: true})).toBe('2 weeks ago at 10:46')
expect(pd({days: -31, detailed: true})).toBe('8 Oct at 10:46')
expect(pd({days: -(31 + 366), detailed: true})).toBe('8 Oct 2015 at 10:46')
});
test('future dates',() => {
expect(pd({seconds: 5})).toBe('just now')
expect(pd({minutes: 5})).toBe('in 5m')
expect(pd({days: 7})).toBe('next Tuesday')
expect(pd({days: 8})).toBe('in 1 week')
expect(pd({days: 14})).toBe('in 2 weeks')
expect(pd({days: 30})).toBe('8 Dec')
expect(pd({days: 30 + 365})).toBe('8 Dec 2017')
});
test('future dates with time',() => {
expect(pd({seconds: 5, detailed: true})).toBe('just now')
expect(pd({minutes: 5, detailed: true})).toBe('in 5m')
expect(pd({days: 7, detailed: true})).toBe('next Tuesday at 11:46')
expect(pd({days: 8, detailed: true})).toBe('in 1 week at 11:46')
expect(pd({days: 14, detailed: true})).toBe('in 2 weeks at 11:46')
expect(pd({days: 30, detailed: true})).toBe('8 Dec at 11:46')
expect(pd({days: 30 + 365, detailed: true})).toBe('8 Dec 2017 at 11:46')
});
function pd(params) {
let theDate = new Date(Date.now());
theDate.setFullYear(theDate.getFullYear() + (params['years'] || 0));
theDate.setMonth(theDate.getMonth() + (params['months'] || 0));
theDate.setDate(theDate.getDate() + (params['days'] || 0));
theDate.setHours(theDate.getHours() + (params['hours'] || 0));
theDate.setMinutes(theDate.getMinutes() + (params['minutes'] || 0));
theDate.setSeconds(theDate.getSeconds() + (params['seconds'] || 0));
return prettyDate(theDate, (params['detailed'] || false))
}
});

View File

@@ -1,34 +0,0 @@
class User{
constructor(kwargs) {
this.user_id = kwargs['user_id'] || '';
this.username = kwargs['username'] || '';
this.full_name = kwargs['full_name'] || '';
this.gravatar = kwargs['gravatar'] || '';
this.email = kwargs['email'] || '';
this.capabilities = kwargs['capabilities'] || [];
this.badges_html = kwargs['badges_html'] || '';
this.is_authenticated = kwargs['is_authenticated'] || false;
}
/**
* """Returns True iff the user has one or more of the given capabilities."""
* @param {...String} args
*/
hasCap(...args) {
for(let cap of args) {
if (this.capabilities.indexOf(cap) != -1) return true;
}
return false;
}
}
let currentUser;
function initCurrentUser(kwargs){
currentUser = new User(kwargs);
}
function getCurrentUser() {
return currentUser;
}
export { getCurrentUser, initCurrentUser }
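// Usage sketch (editor's illustration; the capability names are hypothetical):
initCurrentUser({user_id: 'abc123', capabilities: ['subscriber']});
if (getCurrentUser().hasCap('subscriber', 'admin')) {
    // ... render UI that requires at least one of these capabilities
}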

View File

@@ -1,20 +0,0 @@
function thenLoadImage(imgId, size = 'm') {
return $.get('/api/files/' + imgId)
.then((resp)=> {
var show_variation = null;
if (typeof resp.variations != 'undefined') {
for (var variation of resp.variations) {
if (variation.size != size) continue;
show_variation = variation;
break;
}
}
if (show_variation == null) {
throw 'Image not found: ' + imgId + ' size: ' + size;
}
return show_variation;
})
}
export { thenLoadImage }
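// Usage sketch (editor's illustration; the file id and selector are
// hypothetical): fetch the medium-size ('m') variation and log a failure
// when it cannot be resolved.
thenLoadImage('some-file-id', 'm')
    .then((variation) => $('.js-preview').attr('src', variation.link))
    .fail((err) => console.log(err));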

View File

@@ -1,36 +0,0 @@
export { transformPlaceholder } from './placeholder'
export { prettyDate } from './prettydate'
export { getCurrentUser, initCurrentUser } from './currentuser'
export { thenLoadImage } from './files'
export function debounced(fn, delay=1000) {
let timerId;
return function (...args) {
if (timerId) {
clearTimeout(timerId);
}
timerId = setTimeout(() => {
fn(...args);
timerId = null;
}, delay);
}
}
/**
* Extracts error message from error of type String, Error or xhrError
* @param {*} err
* @returns {String}
*/
export function messageFromError(err){
if (typeof err === "string") {
// type String
return err;
} else if(typeof err.message === "string") {
// type Error
return err.message;
} else {
// type xhr probably
return xhrErrorResponseMessage(err);
}
}
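// Usage sketch (editor's illustration): collapse rapid keystrokes into one
// search request, fired 300 ms after the user stops typing.
let debouncedSearch = debounced((term) => {
    console.log('searching for', term); // hypothetical search trigger
}, 300);
$('.js-search').on('input', (e) => debouncedSearch(e.target.value));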

View File

@@ -1,15 +0,0 @@
/**
* Fade out placeholder, then call callback.
* Note that the placeholder will not be removed, and will not be kept hidden. The caller decides what to do with
* the placeholder.
* @param {jQueryObject} $placeholder
* @param {callback} cb
*/
export function transformPlaceholder($placeholder, cb) {
$placeholder.addClass('placeholder replaced')
.delay(250)
.queue(()=>{
$placeholder.removeClass('placeholder replaced');
cb();
})
}
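// Usage sketch (editor's illustration; `$realContent` is hypothetical):
// fade the placeholder, then let the callback swap in the real content.
transformPlaceholder($('.js-placeholder'), () => {
    $('.js-placeholder').replaceWith($realContent);
});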

View File

@@ -1,97 +0,0 @@
export function prettyDate(time, detail=false) {
/**
* time is anything Date can parse, and we return a pretty string like
* 'an hour ago', 'yesterday', '3 months ago', 'just now', etc.
*/
let theDate = new Date(time);
if (!time || isNaN(theDate)) {
return
}
let pretty = '';
let now = new Date(Date.now()); // Easier to mock Date.now() in tests
let second_diff = Math.round((now - theDate) / 1000);
let day_diff = Math.round(second_diff / 86400); // seconds per day (60*60*24)
if ((day_diff < 0) && (theDate.getFullYear() !== now.getFullYear())) {
// "Jul 16, 2018"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short', year: 'numeric'});
}
else if ((day_diff < -21) && (theDate.getFullYear() == now.getFullYear())) {
// "Jul 16"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short'});
}
else if (day_diff < -7){
let week_count = Math.round(-day_diff / 7);
if (week_count == 1)
pretty = "in 1 week";
else
pretty = "in " + week_count +" weeks";
}
else if (day_diff < 0)
// "next Tuesday"
pretty = 'next ' + theDate.toLocaleDateString('en-NL',{weekday: 'long'});
else if (day_diff === 0) {
if (second_diff < 0) {
let seconds = Math.abs(second_diff);
if (seconds < 10)
return 'just now';
if (seconds < 60)
return 'in ' + seconds +'s';
if (seconds < 120)
return 'in a minute';
if (seconds < 3600)
return 'in ' + Math.round(seconds / 60) + 'm';
if (seconds < 7200)
return 'in an hour';
if (seconds < 86400)
return 'in ' + Math.round(seconds / 3600) + 'h';
} else {
let seconds = second_diff;
if (seconds < 10)
return "just now";
if (seconds < 60)
return seconds + "s ago";
if (seconds < 120)
return "a minute ago";
if (seconds < 3600)
return Math.round(seconds / 60) + "m ago";
if (seconds < 7200)
return "an hour ago";
if (seconds < 86400)
return Math.round(seconds / 3600) + "h ago";
}
}
else if (day_diff == 1)
pretty = "yesterday";
else if (day_diff <= 7)
// "last Tuesday"
pretty = 'last ' + theDate.toLocaleDateString('en-NL',{weekday: 'long'});
else if (day_diff <= 22) {
let week_count = Math.round(day_diff / 7);
if (week_count == 1)
pretty = "1 week ago";
else
pretty = week_count + " weeks ago";
}
else if (theDate.getFullYear() === now.getFullYear())
// "Jul 16"
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short'});
else
// "Jul 16", 2009
pretty = theDate.toLocaleDateString('en-NL',{day: 'numeric', month: 'short', year: 'numeric'});
if (detail){
// "Tuesday at 04:20"
let paddedHour = ('00' + theDate.getUTCHours()).substr(-2);
let paddedMin = ('00' + theDate.getUTCMinutes()).substr(-2);
return pretty + ' at ' + paddedHour + ':' + paddedMin;
}
return pretty;
}
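// Usage sketch (editor's illustration), mirroring the unit tests above:
let fiveMinutesAgo = new Date(Date.now() - 5 * 60 * 1000);
console.log(prettyDate(fiveMinutesAgo));     // '5m ago'
let eightDaysAgo = new Date(Date.now() - 8 * 86400 * 1000);
console.log(prettyDate(eightDaysAgo, true)); // e.g. '1 week ago at 11:46'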

View File

@@ -1,35 +0,0 @@
# Vue components
[Vue.js](https://vuejs.org/) is a JavaScript framework for writing interactive UI components.
Vue.js is bundled into tutti.js, and hence available site-wide.
### Absolute must read
- https://vuejs.org/v2/api/#Options-Data
- https://vuejs.org/v2/api/#v-bind
- https://vuejs.org/v2/api/#v-model
- https://vuejs.org/v2/guide/conditional.html
- https://vuejs.org/v2/guide/list.html#v-for-with-an-Object
- https://vuejs.org/v2/api/#vm-emit
- https://vuejs.org/v2/api/#v-on
### Styling and animation of components
- https://vuejs.org/v2/guide/class-and-style.html#Binding-HTML-Classes
- https://vuejs.org/v2/guide/transitions.html
### More advanced, but important topics
- https://vuejs.org/v2/api/#is
- https://vuejs.org/v2/guide/components-slots.html#Slot-Content
- https://vuejs.org/v2/guide/mixins.html
### Rules of thumb
- [Have a dash in your component name](https://vuejs.org/v2/guide/components-registration.html#Component-Names)
- Have one prop binding per line in component templates.
~~~
// Good!
<my-component
:propA="propX"
:propB="propY"
/>
// Bad!
<my-component :propA="propX" :propB="propY"/>
~~~
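A minimal registration following these rules (illustrative only; the component name and props are made up):
~~~
// Dash in the component name, one prop binding per line when used in templates.
Vue.component('greeting-label', {
    template: '<span>{{ prefix }} {{ name }}</span>',
    props: {
        prefix: String,
        name: String,
    },
});
~~~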

View File

@@ -1,52 +0,0 @@
const TEMPLATE = `
<div class='breadcrumbs' v-if="breadcrumbs.length">
<ul>
<li v-for="crumb in breadcrumbs">
<a :href="crumb.url" v-if="!crumb._self" @click.prevent="navigateToNode(crumb._id)">{{ crumb.name }}</a>
<span v-else>{{ crumb.name }}</span>
</li>
</ul>
</div>
`
Vue.component("node-breadcrumbs", {
template: TEMPLATE,
created() {
this.loadBreadcrumbs();
pillar.events.Nodes.onLoaded(event => {
this.nodeId = event.detail.nodeId;
});
},
props: {
nodeId: String,
},
data() { return {
breadcrumbs: [],
}},
watch: {
nodeId() {
this.loadBreadcrumbs();
},
},
methods: {
loadBreadcrumbs() {
// The node ID may not exist (when at project level, for example).
if (!this.nodeId) {
this.breadcrumbs = [];
return;
}
$.get(`/nodes/${this.nodeId}/breadcrumbs`)
.done(data => {
this.breadcrumbs = data.breadcrumbs;
})
.fail(error => {
toastr.error(xhrErrorResponseMessage(error), "Unable to load breadcrumbs");
})
;
},
navigateToNode(nodeId) {
this.$emit('navigate', nodeId);
},
},
});
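// Usage sketch (editor's illustration): embed the component in a template
// and reload a node when a crumb is clicked; the handler name is made up.
//
//   <node-breadcrumbs
//       :node-id="currentNodeId"
//       @navigate="loadNode"
//   />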

View File

@@ -1,120 +0,0 @@
import { thenGetFileDocument, getFileVariation } from '../../api/files'
import { UnitOfWorkTracker } from '../mixins/UnitOfWorkTracker'
const VALID_NAME_REGEXP = /[a-zA-Z0-9_\-]+/g;
const NON_VALID_NAME_REGEXP = /[^a-zA-Z0-9_\-]+/g;
const TEMPLATE = `
<div class="attachment"
:class="{error: !isSlugOk}"
>
<div class="thumbnail-container"
@click="$emit('insert', oid)"
title="Click to add to comment"
>
<i :class="thumbnailBackup"
v-show="!thumbnail"
/>
<img class="preview-thumbnail"
v-if="!!thumbnail"
:src="thumbnail"
width=50
height=50
/>
</div>
<input class="form-control"
title="Slug"
v-model="newSlug"
/>
<div class="actions">
<div class="action delete"
@click="$emit('delete', oid)"
>
<i class="pi-trash"/>
Delete
</div>
</div>
</div>
`;
Vue.component('comment-attachment-editor', {
template: TEMPLATE,
mixins: [UnitOfWorkTracker],
props: {
slug: String,
allSlugs: Array,
oid: String
},
data() {
return {
newSlug: this.slug,
thumbnail: '',
thumbnailBackup: 'pi-spin spin',
}
},
computed: {
isValidAttachmentName() {
let regexpMatch = this.slug.match(VALID_NAME_REGEXP);
return !!regexpMatch && regexpMatch.length === 1 && regexpMatch[0] === this.slug;
},
isUnique() {
let countOccurrences = 0;
for (let s of this.allSlugs) {
// Don't worry about unicode. isValidAttachmentName denies those anyway
if (s.toUpperCase() === this.slug.toUpperCase()) {
countOccurrences++;
}
}
return countOccurrences === 1;
},
isSlugOk() {
return this.isValidAttachmentName && this.isUnique;
}
},
watch: {
newSlug(newValue, oldValue) {
this.$emit('rename', newValue, this.oid);
},
isSlugOk(newValue, oldValue) {
this.$emit('validation', this.oid, newValue);
}
},
created() {
this.newSlug = this.makeSafeAttachmentString(this.slug);
this.$emit('validation', this.oid, this.isSlugOk);
this.unitOfWork(
thenGetFileDocument(this.oid)
.then((fileDoc) => {
let content_type = fileDoc.content_type
if (content_type.startsWith('image')) {
try {
let imgFile = getFileVariation(fileDoc, 's');
this.thumbnail = imgFile.link;
} catch (error) {
this.thumbnailBackup = 'pi-image';
}
} else if(content_type.startsWith('video')) {
this.thumbnailBackup = 'pi-video';
} else {
this.thumbnailBackup = 'pi-file';
}
})
);
},
methods: {
/**
* Replaces all spaces with underscores and strips all remaining invalid characters.
* @param {String} unsafe
* @returns {String}
*/
makeSafeAttachmentString(unsafe) {
let candidate = unsafe;
let matchSpace = / /g;
candidate = candidate
.replace(matchSpace, '_')
.replace(NON_VALID_NAME_REGEXP, '')
return candidate || `${this.oid}`
}
}
});

View File

@@ -1,168 +0,0 @@
import '../user/Avatar'
import '../utils/PrettyCreated'
import './CommentEditor'
import './Rating'
import { Linkable } from '../mixins/Linkable'
import { UnitOfWorkTracker } from '../mixins/UnitOfWorkTracker'
import { EventBus, Events } from './EventBus'
const TEMPLATE = `
<div class="comment-branch">
<div class="comment-container"
:class="{'is-first': !isReply, 'is-reply': isReply, 'comment-linked': isLinked}"
:id="comment.id">
<div class="comment-avatar">
<user-avatar
:user="comment.user"
/>
<div class="user-badges"
v-html="comment.user.badges_html">
</div>
</div>
<div class="comment-content">
<div class="comment-body"
v-if="!isUpdating"
>
<p class="comment-author">
{{ comment.user.full_name }}
</p>
<span class="comment-msg">
<p v-html="comment.msg_html"/>
</span>
</div>
<comment-editor
v-if="isUpdating"
@unit-of-work="childUnitOfWork"
:mode="editorMode"
:comment="comment"
:user="user"
:parentId="comment.id"
/>
<div class="comment-meta">
<comment-rating
:comment="comment"
@unit-of-work="childUnitOfWork"
/>
<div class="comment-action">
<span class="action" title="Reply to this comment"
v-if="canReply"
@click="showReplyEditor"
>
Reply
</span>
<span class="action" title="Edit comment"
v-if="canUpdate"
@click="showUpdateEditor"
>
Edit
</span>
<span class="action" title="Cancel changes"
v-if="canCancel"
@click="cancleEdit"
>
<i class="pi-cancel"></i>Cancel
</span>
</div>
<pretty-created
:created="comment.created"
:updated="comment.updated"
/>
</div>
</div>
</div>
<div class="comment-reply-container is-reply"
v-if="isReplying"
>
<user-avatar
:user="user"
/>
<comment-editor
v-if="isReplying"
@unit-of-work="childUnitOfWork"
:mode="editorMode"
:comment="comment"
:user="user"
:parentId="comment.id"
/>
</div>
<div class="comments-list">
<comment
v-for="c in comment.replies"
@unit-of-work="childUnitOfWork"
:isReply="true"
:readOnly="readOnly"
:comment="c"
:user="user"
:key="c.id"/>
</div>
</div>
`;
Vue.component('comment', {
template: TEMPLATE,
mixins: [Linkable, UnitOfWorkTracker],
props: {
user: Object,
comment: Object,
readOnly: {
type: Boolean,
default: false,
},
isReply: {
type: Boolean,
default: false,
},
},
data() {
return {
isReplying: false,
isUpdating: false,
id: this.comment.id,
}
},
computed: {
canUpdate() {
return !this.readOnly && this.comment.user.id === this.user.user_id && !this.isUpdating && !this.isReplying;
},
canReply() {
return !this.readOnly && !this.isUpdating && !this.isReplying;
},
canCancel() {
return this.isReplying || this.isUpdating;
},
editorMode() {
if(this.isReplying) {
return 'reply';
}
if(this.isUpdating) {
return 'update';
}
}
},
created() {
EventBus.$on(Events.BEFORE_SHOW_EDITOR, this.doHideEditors);
EventBus.$on(Events.EDIT_DONE, this.doHideEditors);
},
beforeDestroy() {
EventBus.$off(Events.BEFORE_SHOW_EDITOR, this.doHideEditors);
EventBus.$off(Events.EDIT_DONE, this.doHideEditors);
},
methods: {
showReplyEditor() {
EventBus.$emit(Events.BEFORE_SHOW_EDITOR, this.comment.id );
this.isReplying = true;
},
showUpdateEditor() {
EventBus.$emit(Events.BEFORE_SHOW_EDITOR, this.comment.id );
this.isUpdating = true;
},
cancelEdit() {
this.doHideEditors();
EventBus.$emit(Events.EDIT_DONE);
},
doHideEditors() {
this.isReplying = false;
this.isUpdating = false;
},
}
});

Some files were not shown because too many files have changed in this diff.