Compare commits
278 Commits
wip-open-p...wip-commen
SHA1
d0e12401c0
411a6f75c5
07821c7f97
64b4ce3ba9
72417a9abb
6ae9a5ddeb
a897e201ba
3985a00c6f
119291f817
801cda88bf
fc99713732
1d909faf49
ed35c54361
411b15b1a0
9b85a938f3
989a40a7f7
64cc4dc9bf
9182188647
5896f4cfdd
f9a407054d
1c46e4c96b
2990738b5d
e2432f6e9f
aa63389b4f
5075cd5bd0
ceef04455c
c8e62e3610
ce7cf52d70
dc2105fbb8
71185af880
041f8914b2
b4ee5b59bd
314ce40e71
7e941e2299
53811363ce
51057e4d63
a1a48c1941
19fdc75e60
879bcffc2b
6ad12d0098
a738cdcad8
199f37c5d7
4cf93f00f6
eaf9235fa9
24ecf36896
86aa494aed
5a5b97d362
831858a336
e9d247fe97
1ddd8525c7
c43941807c
bbad8eb5c5
04f00cdd4f
66d9fd0908
516ef2ddc7
35fb07ee64
f1d67894dc
aef2cf8c2d
d347ddac2c
186ba167f1
847e97fe8c
7ace5f4292
6cb85b06dc
5c019e8d1c
7796179021
26aca917c8
e262a5c240
e079ac4da1
83097cf473
f4ade9cda7
31244a89e5
749c3dbd58
b1d97e723f
46bdd4f51c
93720e226c
9a0da126e6
45672565e9
3e1273d56c
fe86f76617
008d9b8880
13b606df45
57f5836829
e40ba69872
0aeae2cabd
601b94e23a
00c4ec8741
caee114d48
7fccf02e68
1c42e8fd07
77f855be3e
cede3e75db
02a7014bf4
04e51a9d3f
d4fd6b5cda
2935b442d8
567247f3fd
def52944bf
8753a12dee
77e3c476f0
842ddaeab0
85e5cb4f71
6648f8d074
a5bc36b1cf
e56b3ec61f
9624f6bd76
4e5a53a19b
fbc7c0fce7
bb483e72aa
baf27fa560
845ba953cb
e5b7905a5c
88c0ef0e7c
f8d992400e
263d68071e
0f7f7d5a66
6b29c70212
07670dce96
fe288b1cc2
2e9555e160
b0311af6b5
35a22cab4b
0055633732
78b186c8e4
232321cc2c
a6d662b690
32c7ffbc99
cfcc629b61
8ea0310956
c1958d2da7
030c5494a8
462f31406a
1a1f67cf00
8d5bdf04aa
9a9d15ce47
c795015a3c
afda0062f5
a97c8ffc93
c5fa6b9535
2be41a7145
e8fb77c39b
40933d51cf
9a9ca1bf8b
0983474e76
6bcce87bb9
1401a6168f
85eab0c6cb
a753637e70
f87c7a25df
3ae16d7750
c546dd2881
48df0583ab
094d15116e
534d06ca8f
df078b395d
5df92ca4cf
ecace8c55b
bcacdfb7ea
d7fd90ded1
b9268337c3
9b62daec74
5cc5698477
00ba98d279
e818c92d4e
612862c048
6b3f025e16
8a90cd00e9
17a69b973e
8380270128
35225a189d
be98a95fc0
95c1f913c6
9bcd6cec89
4532c1ea39
e19dd27099
f54e56bad8
eb851ce6e1
586d9c0d3b
ac23c7b00b
811edc5a2a
cb95bf989a
e4fa32b8e4
08bf63c2ee
0baf5b38c3
858a75af8d
6b1a5e24e8
1500e20291
d347534fea
4546469d37
b0d8da821f
1821bb6b7d
278eebd235
2777c37085
5e07cfb9b2
bc16bb6e56
0fcafddbd1
f29e01c78e
2698be3e12
9c2ded79dd
b4acfb89fa
3f8e0396cf
05c488c484
33bd2c5880
76338b4568
7405e198eb
2332bc0960
ac3a599bb6
814275fc95
40c19a3cb0
a67527d6af
791906521f
2ad5b20880
f6fd9228e5
e9f303f330
00a7406a1e
82aa521b5f
f7220924bc
46b0d6d663
595bb48741
1c430044b9
73bc084417
37ca803162
939bb97f13
2c40665271
e8123b7839
6d6a40b8c0
efd345ec46
d655d2b749
a58e616769
a8a7166e78
1649591d75
9389fef8ba
6737aa1123
40f79af49d
84608500b9
819300f954
b569829343
c35fb6202b
d0ff519980
6ff4ee8fa1
b5535a8773
2ded541955
3965061bde
5238e2c26d
469f24d113
8a0f582a80
559e212c55
61278730c6
0fdcbc3947
8dc3296bd5
a699138fd6
466adabbb0
7da741f354
41369d134c
61ed083218
46777f7f8c
ef94c68177
aaf452e18b
c607eaf23d
baa77a7de5
5fb40eb32b
c83a1a21b8
549cf0a3e8
9f380751f5
49075cbc60
81848c2c44
9ee7b742ab
58c33074c3
756427b34e
7e06212cd5
ef3912b647
151484dee3
bec1f209ba
0e14bdd09f
ce6df542cc
530302b74f
1bfb6cd2f6
53b6210531
aeaa03ed80
.gitignore (vendored, 2 changes)
@@ -12,6 +12,7 @@ config_local.py
/build
/.cache
/.pytest_cache/
/*.egg-info/
profile.stats
/dump/
@@ -26,6 +27,7 @@ profile.stats
pillar/web/static/assets/css/*.css
pillar/web/static/assets/js/*.min.js
pillar/web/static/assets/js/vendor/video.min.js
pillar/web/static/storage/
pillar/web/static/uploads/
pillar/web/templates/
@@ -65,6 +65,12 @@ You can run the Celery Worker using `manage.py celery worker`.

Find other Celery operations with the `manage.py celery` command.

## Elasticsearch

Pillar uses [Elasticsearch](https://www.elastic.co/products/elasticsearch) to power the search engine.
You will need to run the `manage.py elastic reset_index` command to initialize the indexing.
If you need to reindex your documents in Elasticsearch, run the `manage.py elastic reindex` command.

## Translations

If the language you want to support doesn't exist, you need to run: `translations init es_AR`.
gulpfile.js (180 changes)
@@ -1,37 +1,50 @@
var argv = require('minimist')(process.argv.slice(2));
var autoprefixer = require('gulp-autoprefixer');
var cache = require('gulp-cached');
var chmod = require('gulp-chmod');
var concat = require('gulp-concat');
var git = require('gulp-git');
var gulpif = require('gulp-if');
var gulp = require('gulp');
var livereload = require('gulp-livereload');
var plumber = require('gulp-plumber');
var pug = require('gulp-pug');
var rename = require('gulp-rename');
var sass = require('gulp-sass');
var sourcemaps = require('gulp-sourcemaps');
var uglify = require('gulp-uglify');
let argv = require('minimist')(process.argv.slice(2));
let autoprefixer = require('gulp-autoprefixer');
let cache = require('gulp-cached');
let chmod = require('gulp-chmod');
let concat = require('gulp-concat');
let git = require('gulp-git');
let gulpif = require('gulp-if');
let gulp = require('gulp');
let livereload = require('gulp-livereload');
let plumber = require('gulp-plumber');
let pug = require('gulp-pug');
let rename = require('gulp-rename');
let sass = require('gulp-sass');
let sourcemaps = require('gulp-sourcemaps');
let uglify = require('gulp-uglify-es').default;
let browserify = require('browserify');
let babelify = require('babelify');
let sourceStream = require('vinyl-source-stream');
let glob = require('glob');
let es = require('event-stream');
let path = require('path');
let buffer = require('vinyl-buffer');

var enabled = {
let enabled = {
    uglify: argv.production,
    maps: argv.production,
    maps: !argv.production,
    failCheck: !argv.production,
    prettyPug: !argv.production,
    cachify: !argv.production,
    cleanup: argv.production,
    chmod: argv.production,
};

var destination = {
let destination = {
    css: 'pillar/web/static/assets/css',
    pug: 'pillar/web/templates',
    js: 'pillar/web/static/assets/js',
}

let source = {
    bootstrap: 'node_modules/bootstrap/',
    jquery: 'node_modules/jquery/',
    popper: 'node_modules/popper.js/'
}

/* CSS */
gulp.task('styles', function() {
/* Stylesheets */
gulp.task('styles', function(done) {
    gulp.src('src/styles/**/*.sass')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
@@ -42,11 +55,12 @@ gulp.task('styles', function() {
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(gulp.dest(destination.css))
        .pipe(gulpif(argv.livereload, livereload()));
    done();
});

/* Templates - Pug */
gulp.task('templates', function() {
/* Templates */
gulp.task('templates', function(done) {
    gulp.src('src/templates/**/*.pug')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.cachify, cache('templating')))
@@ -55,11 +69,12 @@ gulp.task('templates', function() {
        }))
        .pipe(gulp.dest(destination.pug))
        .pipe(gulpif(argv.livereload, livereload()));
    done();
});

/* Individual Uglified Scripts */
gulp.task('scripts', function() {
gulp.task('scripts', function(done) {
    gulp.src('src/scripts/*.js')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.cachify, cache('scripting')))
@@ -67,56 +82,114 @@ gulp.task('scripts', function() {
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(rename({suffix: '.min'}))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(chmod(644))
        .pipe(gulpif(enabled.chmod, chmod(0o644)))
        .pipe(gulp.dest(destination.js))
        .pipe(gulpif(argv.livereload, livereload()));
    done();
});

function browserify_base(entry) {
    let pathSplited = path.dirname(entry).split(path.sep);
    let moduleName = pathSplited[pathSplited.length - 1];
    return browserify({
            entries: [entry],
            standalone: 'pillar.' + moduleName,
        })
        .transform(babelify, { "presets": ["@babel/preset-env"] })
        .bundle()
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(sourceStream(path.basename(entry)))
        .pipe(buffer())
        .pipe(rename({
            basename: moduleName,
            extname: '.min.js'
        }));
}

function browserify_common() {
    return glob.sync('src/scripts/js/es6/common/**/init.js').map(browserify_base);
}

gulp.task('scripts_browserify', function(done) {
    glob('src/scripts/js/es6/individual/**/init.js', function(err, files) {
        if(err) done(err);

        var tasks = files.map(function(entry) {
            return browserify_base(entry)
                .pipe(gulpif(enabled.maps, sourcemaps.init()))
                .pipe(gulpif(enabled.uglify, uglify()))
                .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
                .pipe(gulp.dest(destination.js));
        });

        es.merge(tasks).on('end', done);
    })
});

/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js */
/* Since it's always loaded, it's only for functions that we want site-wide */
gulp.task('scripts_concat_tutti', function() {
    gulp.src('src/scripts/tutti/**/*.js')
/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js
 * Since it's always loaded, it's only for functions that we want site-wide.
 * It also includes jQuery and Bootstrap (and its dependency popper), since
 * the site doesn't work without it anyway. */
gulp.task('scripts_concat_tutti', function(done) {

    let toUglify = [
        source.jquery + 'dist/jquery.min.js',
        source.popper + 'dist/umd/popper.min.js',
        source.bootstrap + 'js/dist/index.js',
        source.bootstrap + 'js/dist/util.js',
        source.bootstrap + 'js/dist/alert.js',
        source.bootstrap + 'js/dist/collapse.js',
        source.bootstrap + 'js/dist/dropdown.js',
        source.bootstrap + 'js/dist/tooltip.js',
        'src/scripts/tutti/**/*.js'
    ];

    es.merge(gulp.src(toUglify), ...browserify_common())
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(concat("tutti.min.js"))
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(chmod(644))
        .pipe(gulpif(enabled.chmod, chmod(0o644)))
        .pipe(gulp.dest(destination.js))
        .pipe(gulpif(argv.livereload, livereload()));
    done();
});

gulp.task('scripts_concat_markdown', function() {
    gulp.src('src/scripts/markdown/**/*.js')
        .pipe(gulpif(enabled.failCheck, plumber()))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
        .pipe(concat("markdown.min.js"))
        .pipe(gulpif(enabled.uglify, uglify()))
        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
        .pipe(chmod(644))
        .pipe(gulp.dest(destination.js))
        .pipe(gulpif(argv.livereload, livereload()));

/* Simply move these vendor scripts from node_modules. */
gulp.task('scripts_move_vendor', function(done) {

    let toMove = [
        'node_modules/video.js/dist/video.min.js',
    ];

    gulp.src(toMove)
        .pipe(gulp.dest(destination.js + '/vendor/'));
    done();
});

// While developing, run 'gulp watch'
gulp.task('watch',function() {
gulp.task('watch',function(done) {
    // Only listen for live reloads if ran with --livereload
    if (argv.livereload){
        livereload.listen();
    }

    gulp.watch('src/styles/**/*.sass',['styles']);
    gulp.watch('src/templates/**/*.pug',['templates']);
    gulp.watch('src/scripts/*.js',['scripts']);
    gulp.watch('src/scripts/tutti/**/*.js',['scripts_concat_tutti']);
    gulp.watch('src/scripts/markdown/**/*.js',['scripts_concat_markdown']);
    gulp.watch('src/styles/**/*.sass',gulp.series('styles'));
    gulp.watch('src/templates/**/*.pug',gulp.series('templates'));
    gulp.watch('src/scripts/*.js',gulp.series('scripts'));
    gulp.watch('src/scripts/tutti/**/*.js',gulp.series('scripts_concat_tutti'));
    gulp.watch('src/scripts/js/**/*.js',gulp.series(['scripts_browserify', 'scripts_concat_tutti']));
    done();
});

// Erases all generated files in output directories.
gulp.task('cleanup', function() {
    var paths = [];
gulp.task('cleanup', function(done) {
    let paths = [];
    for (attr in destination) {
        paths.push(destination[attr]);
    }
@@ -124,17 +197,20 @@ gulp.task('cleanup', function() {
    git.clean({ args: '-f -X ' + paths.join(' ') }, function (err) {
        if(err) throw err;
    });

    done();
});

// Run 'gulp' to build everything at once
var tasks = [];
let tasks = [];
if (enabled.cleanup) tasks.push('cleanup');
gulp.task('default', tasks.concat([
// gulp.task('default', gulp.parallel('styles', 'templates', 'scripts', 'scripts_tutti'));

gulp.task('default', gulp.parallel(tasks.concat([
    'styles',
    'templates',
    'scripts',
    'scripts_concat_tutti',
    'scripts_concat_markdown',
]));
    'scripts_move_vendor',
    'scripts_browserify',
])));
jest.config.js (180 changes, new file)
@@ -0,0 +1,180 @@
// For a detailed explanation regarding each configuration property, visit:
// https://jestjs.io/docs/en/configuration.html

module.exports = {
  // All imported modules in your tests should be mocked automatically
  // automock: false,

  // Stop running tests after the first failure
  // bail: false,

  // Respect "browser" field in package.json when resolving modules
  // browser: false,

  // The directory where Jest should store its cached dependency information
  // cacheDirectory: "/tmp/jest_rs",

  // Automatically clear mock calls and instances between every test
  clearMocks: true,

  // Indicates whether the coverage information should be collected while executing the test
  // collectCoverage: false,

  // An array of glob patterns indicating a set of files for which coverage information should be collected
  // collectCoverageFrom: null,

  // The directory where Jest should output its coverage files
  // coverageDirectory: null,

  // An array of regexp pattern strings used to skip coverage collection
  // coveragePathIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // A list of reporter names that Jest uses when writing coverage reports
  // coverageReporters: [
  //   "json",
  //   "text",
  //   "lcov",
  //   "clover"
  // ],

  // An object that configures minimum threshold enforcement for coverage results
  // coverageThreshold: null,

  // Make calling deprecated APIs throw helpful error messages
  // errorOnDeprecated: false,

  // Force coverage collection from ignored files using an array of glob patterns
  // forceCoverageMatch: [],

  // A path to a module which exports an async function that is triggered once before all test suites
  // globalSetup: null,

  // A path to a module which exports an async function that is triggered once after all test suites
  // globalTeardown: null,

  // A set of global variables that need to be available in all test environments
  // globals: {},

  // An array of directory names to be searched recursively up from the requiring module's location
  // moduleDirectories: [
  //   "node_modules"
  // ],

  // An array of file extensions your modules use
  // moduleFileExtensions: [
  //   "js",
  //   "json",
  //   "jsx",
  //   "node"
  // ],

  // A map from regular expressions to module names that allow to stub out resources with a single module
  // moduleNameMapper: {},

  // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
  // modulePathIgnorePatterns: [],

  // Activates notifications for test results
  // notify: false,

  // An enum that specifies notification mode. Requires { notify: true }
  // notifyMode: "always",

  // A preset that is used as a base for Jest's configuration
  // preset: null,

  // Run tests from one or more projects
  // projects: null,

  // Use this configuration option to add custom reporters to Jest
  // reporters: undefined,

  // Automatically reset mock state between every test
  // resetMocks: false,

  // Reset the module registry before running each individual test
  // resetModules: false,

  // A path to a custom resolver
  // resolver: null,

  // Automatically restore mock state between every test
  // restoreMocks: false,

  // The root directory that Jest should scan for tests and modules within
  // rootDir: null,

  // A list of paths to directories that Jest should use to search for files in
  // roots: [
  //   "<rootDir>"
  // ],

  // Allows you to use a custom runner instead of Jest's default test runner
  // runner: "jest-runner",

  // The paths to modules that run some code to configure or set up the testing environment before each test
  setupFiles: ["<rootDir>/src/scripts/js/es6/test_config/test-env.js"],

  // The path to a module that runs some code to configure or set up the testing framework before each test
  // setupTestFrameworkScriptFile: null,

  // A list of paths to snapshot serializer modules Jest should use for snapshot testing
  // snapshotSerializers: [],

  // The test environment that will be used for testing
  testEnvironment: "jsdom",

  // Options that will be passed to the testEnvironment
  // testEnvironmentOptions: {},

  // Adds a location field to test results
  // testLocationInResults: false,

  // The glob patterns Jest uses to detect test files
  // testMatch: [
  //   "**/__tests__/**/*.js?(x)",
  //   "**/?(*.)+(spec|test).js?(x)"
  // ],

  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
  // testPathIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // The regexp pattern Jest uses to detect test files
  // testRegex: "",

  // This option allows the use of a custom results processor
  // testResultsProcessor: null,

  // This option allows use of a custom test runner
  // testRunner: "jasmine2",

  // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
  // testURL: "http://localhost",

  // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
  // timers: "real",

  // A map from regular expressions to paths to transformers
  // transform: null,

  // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
  // transformIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
  // unmockedModulePathPatterns: undefined,

  // Indicates whether each individual test should be reported during the run
  // verbose: null,

  // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
  // watchPathIgnorePatterns: [],

  // Whether to use watchman for file crawling
  // watchman: true,
};
package-lock.json (12797 changes, generated, new file)
File diff suppressed because it is too large.
package.json (68 changes)
@@ -1,26 +1,46 @@
{
  "name": "pillar",
  "license": "GPL-2.0+",
  "author": "Blender Institute",
  "repository": {
    "type": "git",
    "url": "https://github.com/armadillica/pillar.git"
  },
  "devDependencies": {
    "gulp": "~3.9.1",
    "gulp-autoprefixer": "~2.3.1",
    "gulp-cached": "~1.1.0",
    "gulp-chmod": "~1.3.0",
    "gulp-concat": "~2.6.0",
    "gulp-if": "^2.0.1",
    "gulp-git": "~2.4.2",
    "gulp-livereload": "~3.8.1",
    "gulp-plumber": "~1.1.0",
    "gulp-pug": "~3.2.0",
    "gulp-rename": "~1.2.2",
    "gulp-sass": "~2.3.1",
    "gulp-sourcemaps": "~1.6.0",
    "gulp-uglify": "~1.5.3",
    "minimist": "^1.2.0"
  }
  "name": "pillar",
  "license": "GPL-2.0+",
  "author": "Blender Institute",
  "repository": {
    "type": "git",
    "url": "git://git.blender.org/pillar.git"
  },
  "devDependencies": {
    "@babel/core": "7.1.6",
    "@babel/preset-env": "7.1.6",
    "acorn": "5.7.3",
    "babel-core": "7.0.0-bridge.0",
    "babelify": "10.0.0",
    "browserify": "16.2.3",
    "gulp": "4.0.0",
    "gulp-autoprefixer": "6.0.0",
    "gulp-babel": "8.0.0",
    "gulp-cached": "1.1.1",
    "gulp-chmod": "2.0.0",
    "gulp-concat": "2.6.1",
    "gulp-git": "2.8.0",
    "gulp-if": "2.0.2",
    "gulp-livereload": "4.0.0",
    "gulp-plumber": "1.2.0",
    "gulp-pug": "4.0.1",
    "gulp-rename": "1.4.0",
    "gulp-sass": "4.0.1",
    "gulp-sourcemaps": "2.6.4",
    "gulp-uglify-es": "1.0.4",
    "jest": "23.6.0",
    "minimist": "1.2.0",
    "vinyl-buffer": "1.0.1",
    "vinyl-source-stream": "2.0.0"
  },
  "dependencies": {
    "bootstrap": "4.1.3",
    "glob": "7.1.3",
    "jquery": "3.3.1",
    "popper.js": "1.14.4",
    "video.js": "7.2.2"
  },
  "scripts": {
    "test": "jest"
  }
}
@@ -140,8 +140,6 @@ class PillarServer(BlinkerCompatibleEve):

        self.org_manager = pillar.api.organizations.OrgManager()

        self.before_first_request(self.setup_db_indices)

        # Make CSRF protection available to the application. By default it is
        # disabled on all endpoints. More info at WTF_CSRF_CHECK_DEFAULT in config.py
        self.csrf = CSRFProtect(self)

@@ -184,7 +182,6 @@ class PillarServer(BlinkerCompatibleEve):

        if not self.config.get('STATIC_FILE_HASH'):
            self.log.warning('STATIC_FILE_HASH is empty, generating random one')
            f = open('/data/git/blender-cloud/config_local.py', 'a')
            h = re.sub(r'[_.~-]', '', secrets.token_urlsafe())[:8]
            self.config['STATIC_FILE_HASH'] = h

@@ -281,7 +278,7 @@ class PillarServer(BlinkerCompatibleEve):
        self.encoding_service_client = Zencoder(self.config['ZENCODER_API_KEY'])

    def _config_caching(self):
        from flask_cache import Cache
        from flask_caching import Cache
        self.cache = Cache(self)

    def set_languages(self, translations_folder: pathlib.Path):

@@ -480,10 +477,11 @@ class PillarServer(BlinkerCompatibleEve):

        # Pillar-defined Celery task modules:
        celery_task_modules = [
            'pillar.celery.tasks',
            'pillar.celery.search_index_tasks',
            'pillar.celery.file_link_tasks',
            'pillar.celery.badges',
            'pillar.celery.email_tasks',
            'pillar.celery.file_link_tasks',
            'pillar.celery.search_index_tasks',
            'pillar.celery.tasks',
        ]

        # Allow Pillar extensions to define their own Celery tasks.

@@ -705,6 +703,8 @@ class PillarServer(BlinkerCompatibleEve):
    def finish_startup(self):
        self.log.info('Using MongoDB database %r', self.config['MONGO_DBNAME'])

        with self.app_context():
            self.setup_db_indices()
        self._config_celery()

        api.setup_app(self)

@@ -712,6 +712,10 @@ class PillarServer(BlinkerCompatibleEve):

        authentication.setup_app(self)

        # Register Flask Debug Toolbar (disabled by default).
        from flask_debugtoolbar import DebugToolbarExtension
        DebugToolbarExtension(self)

        for ext in self.pillar_extensions.values():
            self.log.info('Setting up extension %s', ext.name)
            ext.setup_app(self)

@@ -722,6 +726,7 @@ class PillarServer(BlinkerCompatibleEve):
        self._config_user_caps()

        # Only enable this when debugging.
        # TODO(fsiddi): Consider removing this in favor of the routes tab in Flask Debug Toolbar.
        # self._list_routes()

    def setup_db_indices(self):

@@ -761,6 +766,8 @@ class PillarServer(BlinkerCompatibleEve):
        coll.create_index([('properties.status', pymongo.ASCENDING),
                           ('node_type', pymongo.ASCENDING),
                           ('_created', pymongo.DESCENDING)])
        # Used for asset tags
        coll.create_index([('properties.tags', pymongo.ASCENDING)])

        coll = db['projects']
        # This index is used for statistics, and for fetching public projects.
@@ -1,6 +1,6 @@
def setup_app(app):
    from . import encoding, blender_id, projects, local_auth, file_storage
    from . import users, nodes, latest, blender_cloud, service, activities
    from . import users, nodes, latest, blender_cloud, service, activities, timeline
    from . import organizations
    from . import search

@@ -11,6 +11,7 @@ def setup_app(app):
    local_auth.setup_app(app, url_prefix='/auth')
    file_storage.setup_app(app, url_prefix='/storage')
    latest.setup_app(app, url_prefix='/latest')
    timeline.setup_app(app, url_prefix='/timeline')
    blender_cloud.setup_app(app, url_prefix='/bcloud')
    users.setup_app(app, api_prefix='/users')
    service.setup_app(app, api_prefix='/service')
@@ -6,6 +6,7 @@ with Blender ID.

import datetime
import logging
from urllib.parse import urljoin

import requests
from bson import tz_util

@@ -47,13 +48,6 @@ def store_subclient_token():
        'subclient_user_id': str(db_user['_id'])}), status


def blender_id_endpoint():
    """Gets the endpoint for the authentication API. If the env variable
    is defined, it's possible to override the (default) production address.
    """
    return current_app.config['BLENDER_ID_ENDPOINT'].rstrip('/')


def validate_create_user(blender_id_user_id, token, oauth_subclient_id):
    """Validates a user against Blender ID, creating the user in our database.

@@ -121,13 +115,14 @@ def validate_token(user_id, token, oauth_subclient_id):
    # We only want to accept Blender Cloud tokens.
    payload['client_id'] = current_app.config['OAUTH_CREDENTIALS']['blender-id']['id']

    url = '{0}/u/validate_token'.format(blender_id_endpoint())
    blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
    url = urljoin(blender_id_endpoint, 'u/validate_token')
    log.debug('POSTing to %r', url)

    # Retry a few times when POSTing to BlenderID fails.
    # Source: http://stackoverflow.com/a/15431343/875379
    s = requests.Session()
    s.mount(blender_id_endpoint(), HTTPAdapter(max_retries=5))
    s.mount(blender_id_endpoint, HTTPAdapter(max_retries=5))

    # POST to Blender ID, handling errors as negative verification results.
    try:

@@ -225,7 +220,7 @@ def fetch_blenderid_user() -> dict:

    my_log = log.getChild('fetch_blenderid_user')

    bid_url = '%s/api/user' % blender_id_endpoint()
    bid_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'], 'api/user')
    my_log.debug('Fetching user info from %s', bid_url)

    credentials = current_app.config['OAUTH_CREDENTIALS']['blender-id']

@@ -270,7 +265,7 @@ def setup_app(app, url_prefix):
def switch_user_url(next_url: str) -> str:
    from urllib.parse import quote

    base_url = '%s/switch' % blender_id_endpoint()
    base_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'], 'switch')
    if next_url:
        return '%s?next=%s' % (base_url, quote(next_url))
    return base_url
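A note on the `urljoin` switch above: unlike the old `'%s/...' % blender_id_endpoint()` formatting, `urljoin` resolves its second argument against the last path segment of the base URL, so the configured `BLENDER_ID_ENDPOINT` needs a trailing slash for the joined URLs to come out right. A quick illustration (the endpoint value is made up):

```python
from urllib.parse import urljoin

# With a trailing slash, the relative path is appended:
assert urljoin('https://id.example.com/id/', 'u/validate_token') \
    == 'https://id.example.com/id/u/validate_token'

# Without one, urljoin replaces the last path segment instead:
assert urljoin('https://id.example.com/id', 'u/validate_token') \
    == 'https://id.example.com/u/validate_token'
```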
@@ -1,8 +1,8 @@
import copy
from datetime import datetime
import logging

from bson import ObjectId, tz_util
from datetime import datetime
import cerberus.errors
from eve.io.mongo import Validator
from flask import current_app

@@ -12,6 +12,31 @@ log = logging.getLogger(__name__)


class ValidateCustomFields(Validator):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Will be a reference to the actual document being validated, so that we can
        # modify it during validation.
        self.__real_document = None

    def validate(self, document, *args, **kwargs):
        # Keep a reference to the actual document, because Cerberus validates copies.
        self.__real_document = document
        result = super().validate(document, *args, **kwargs)

        # Store the in-place modified document as self.document, so that Eve's post_internal
        # can actually pick it up as the validated document. We need to make a copy so that
        # further modifications (like setting '_etag' etc.) aren't done in-place.
        self.document = copy.deepcopy(document)

        return result

    def _get_child_validator(self, *args, **kwargs):
        child = super()._get_child_validator(*args, **kwargs)
        # Pass along our reference to the actual document.
        child.__real_document = self.__real_document
        return child

    # TODO: split this into a convert_property(property, schema) and call that from this function.
    def convert_properties(self, properties, node_schema):
        """Converts datetime strings and ObjectId strings to actual Python objects."""

@@ -73,6 +98,11 @@ class ValidateCustomFields(Validator):
            dict_property[key] = self.convert_properties(item_prop, item_schema)['item']

    def _validate_valid_properties(self, valid_properties, field, value):
        """Fake property that triggers node dynamic property validation.

        The rule's arguments are validated against this schema:
        {'type': 'boolean'}
        """
        from pillar.api.utils import project_get_node_type

        projects_collection = current_app.data.driver.db['projects']

@@ -107,7 +137,8 @@ class ValidateCustomFields(Validator):
        if val:
            # This ensures the modifications made by v's coercion rules are
            # visible to this validator's output.
            self.current[field] = v.current
            # TODO(fsiddi): this no longer works due to Cerberus internal changes.
            # self.current[field] = v.current
            return True

        log.warning('Error validating properties for node %s: %s', self.document, v.errors)

@@ -118,6 +149,9 @@ class ValidateCustomFields(Validator):

        Combine "required_after_creation=True" with "required=False" to allow
        pre-insert hooks to set default values.

        The rule's arguments are validated against this schema:
        {'type': 'boolean'}
        """

        if not required_after_creation:

@@ -125,14 +159,14 @@ class ValidateCustomFields(Validator):
            # validator at all.
            return

        if self._id is None:
        if self.document_id is None:
            # This is a creation call, in which case this validator shouldn't run.
            return

        if not value:
            self._error(field, "Value is required once the document was created")

    def _validate_type_iprange(self, field_name: str, value: str):
    def _validator_iprange(self, field_name: str, value: str):
        """Ensure the field contains a valid IP address.

        Supports both IPv6 and IPv4 ranges. Requires the IPy module.

@@ -149,40 +183,36 @@ class ValidateCustomFields(Validator):
        if ip.prefixlen() == 0:
            self._error(field_name, 'Zero-length prefix is not allowed')
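For context, a standalone sketch of what the `iprange` check above does, using the IPy module the docstring mentions (the helper name and sample values are illustrative, not Pillar API):

```python
import typing

from IPy import IP  # the IPy module the validator relies on


def iprange_error(value: str) -> typing.Optional[str]:
    """Return an error message for an invalid IP range, or None when valid."""
    try:
        ip = IP(value)
    except ValueError as ex:
        return str(ex)
    if ip.prefixlen() == 0:
        return 'Zero-length prefix is not allowed'
    return None


assert iprange_error('192.168.0.0/16') is None
assert iprange_error('2001:db8::/32') is None
assert iprange_error('::/0') == 'Zero-length prefix is not allowed'
```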
    def _validate_type_binary(self, field_name: str, value: bytes):
        """Add support for binary type.

        This type was actually introduced in Cerberus 1.0, so we can drop
        support for this once Eve starts using that version (or newer).
    def _validator_markdown(self, field, value):
        """Convert MarkDown.
        """
        my_log = log.getChild('_validator_markdown')

        if not isinstance(value, (bytes, bytearray)):
            self._error(field_name, f'wrong value type {type(value)}, expected bytes or bytearray')
        # Find this field inside the original document
        my_subdoc = self._subdoc_in_real_document()
        if my_subdoc is None:
            # If self.update==True we are validating an update document, which
            # may not contain all fields, so then a missing field is fine.
            if not self.update:
                self._error(field, f'validator_markdown: unable to find sub-document '
                                   f'for path {self.document_path}')
            return

    def _validate_coerce(self, coerce, field: str, value):
        """Override Cerberus' _validate_coerce method for richer features.

        This now supports named coercion functions (available in Cerberus 1.0+)
        and passes the field name to coercion functions as well.
        """
        if isinstance(coerce, str):
            coerce = getattr(self, f'_normalize_coerce_{coerce}')

        try:
            return coerce(field, value)
        except (TypeError, ValueError):
            self._error(field, cerberus.errors.ERROR_COERCION_FAILED.format(field))

    def _normalize_coerce_markdown(self, field: str, value):
        """Render Markdown from this field into {field}_html.

        The field name MUST NOT end in `_html`. The Markdown is read from this
        field and the rendered HTML is written to the field `{field}_html`.
        """
        my_log.debug('validating field %r with value %r', field, value)
        save_to = pillar.markdown.cache_field_name(field)
        html = pillar.markdown.markdown(value)
        field_name = pillar.markdown.cache_field_name(field)
        self.current[field_name] = html
        return value
        my_log.debug('saving result to %r in doc with id %s', save_to, id(my_subdoc))
        my_subdoc[save_to] = html

    def _subdoc_in_real_document(self):
        """Return a reference to the current sub-document inside the real document.

        This allows modification of the document being validated.
        """
        my_subdoc = getattr(self, 'persisted_document') or self.__real_document
        for item in self.document_path:
            my_subdoc = my_subdoc[item]
        return my_subdoc
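Condensed, the markdown validation above boils down to: read the Markdown source from the field, render it, and write the HTML into a sibling cache field on the *real* document, since Cerberus itself only validates copies. A minimal sketch, with a stand-in renderer and an assumed cache-field mapping:

```python
# Minimal sketch of the markdown-validator flow; `render` stands in for
# pillar.markdown.markdown and `cache_field_name` for its namesake in
# pillar.markdown (the exact field-name mapping is assumed here).
def validate_markdown(doc: dict, field: str, render, cache_field_name) -> None:
    doc[cache_field_name(field)] = render(doc[field])


doc = {'description': '*hello*'}
validate_markdown(
    doc, 'description',
    render=lambda md: '<p><em>hello</em></p>',  # stand-in renderer
    cache_field_name=lambda f: f'_{f}_html')    # assumed mapping
assert doc['_description_html'] == '<p><em>hello</em></p>'
```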
if __name__ == '__main__':

@@ -190,12 +220,12 @@ if __name__ == '__main__':

    v = ValidateCustomFields()
    v.schema = {
        'foo': {'type': 'string', 'coerce': 'markdown'},
        'foo': {'type': 'string', 'validator': 'markdown'},
        'foo_html': {'type': 'string'},
        'nested': {
            'type': 'dict',
            'schema': {
                'bar': {'type': 'string', 'coerce': 'markdown'},
                'bar': {'type': 'string', 'validator': 'markdown'},
                'bar_html': {'type': 'string'},
            }
        }
@@ -121,12 +121,43 @@ users_schema = {
    'service': {
        'type': 'dict',
        'allow_unknown': True,
    },

    # Node-specific information for this user.
    'nodes': {
        'type': 'dict',
        'schema': {
            'badger': {
                'type': 'list',
                'schema': {'type': 'string'}
            }
        }
            # Per watched video info about where the user left off, both in time and in percent.
            'view_progress': {
                'type': 'dict',
                # Keyed by Node ID of the video asset. MongoDB doesn't support using
                # ObjectIds as key, so we cast them to string instead.
                'keyschema': {'type': 'string'},
                'valueschema': {
                    'type': 'dict',
                    'schema': {
                        'progress_in_sec': {'type': 'float', 'min': 0},
                        'progress_in_percent': {'type': 'integer', 'min': 0, 'max': 100},

                        # When the progress was last updated, so we can limit this history to
                        # the last-watched N videos if we want, or show stuff in chrono order.
                        'last_watched': {'type': 'datetime'},

                        # True means progress_in_percent = 100, for easy querying
                        'done': {'type': 'boolean', 'default': False},
                    },
                },
            },
        },
    },

    'badges': {
        'type': 'dict',
        'schema': {
            'html': {'type': 'string'},  # HTML fetched from Blender ID.
            'expires': {'type': 'datetime'},  # When we should fetch it again.
        },
    },

    # Properties defined by extensions. Extensions should use their name (see the
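For illustration, a `nodes.view_progress` entry that satisfies the schema above could look like this (the node ID and values are made up):

```python
import datetime

from bson import tz_util

# Keyed by the video asset's Node ID cast to str, since MongoDB
# doesn't allow ObjectId keys (see the schema comment above).
view_progress = {
    '5c06a2b5e1ac4d3a6b2f0000': {
        'progress_in_sec': 323.4,
        'progress_in_percent': 42,
        'last_watched': datetime.datetime(2018, 11, 2, 16, 20, tzinfo=tz_util.utc),
        'done': False,
    },
}
```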
@@ -155,7 +186,7 @@ organizations_schema = {
    'description': {
        'type': 'string',
        'maxlength': 256,
        'coerce': 'markdown',
        'validator': 'markdown',
    },
    '_description_html': {'type': 'string'},
    'website': {

@@ -227,7 +258,7 @@ organizations_schema = {
        'start': {'type': 'binary', 'required': True},
        'end': {'type': 'binary', 'required': True},
        'prefix': {'type': 'integer', 'required': True},
        'human': {'type': 'iprange', 'required': True},
        'human': {'type': 'string', 'required': True, 'validator': 'iprange'},
        }
    },
},

@@ -292,7 +323,7 @@ nodes_schema = {
    },
    'description': {
        'type': 'string',
        'coerce': 'markdown',
        'validator': 'markdown',
    },
    '_description_html': {'type': 'string'},
    'picture': _file_embedded_schema,

@@ -327,7 +358,7 @@ nodes_schema = {
    'properties': {
        'type': 'dict',
        'valid_properties': True,
        'required': True,
        'required': True
    },
    'permissions': {
        'type': 'dict',

@@ -345,11 +376,11 @@ tokens_schema = {
    },
    'token': {
        'type': 'string',
        'required': False,
        'required': True,
    },
    'token_hashed': {
        'type': 'string',
        'required': True,
        'required': False,
    },
    'expire_time': {
        'type': 'datetime',

@@ -368,6 +399,13 @@ tokens_schema = {
        'type': 'string',
    },
},

    # OAuth scopes granted to this token.
    'oauth_scopes': {
        'type': 'list',
        'default': [],
        'schema': {'type': 'string'},
    }
}

files_schema = {

@@ -539,7 +577,7 @@ projects_schema = {
    },
    'description': {
        'type': 'string',
        'coerce': 'markdown',
        'validator': 'markdown',
    },
    '_description_html': {'type': 'string'},
    # Short summary for the project

@@ -833,4 +871,9 @@ UPSET_ON_PUT = False  # do not create new document on PUT of non-existant URL.
X_DOMAINS = '*'
X_ALLOW_CREDENTIALS = True
X_HEADERS = 'Authorization'
XML = False
RENDERERS = ['eve.render.JSONRenderer']

# TODO(Sybren): this is a quick workaround to make /p/{url}/jstree work again.
# Apparently Eve is now stricter in checking against MONGO_QUERY_BLACKLIST, and
# blocks our use of $regex.
MONGO_QUERY_BLACKLIST = ['$where']
@@ -130,6 +130,67 @@ def _process_image(bucket: Bucket,
    src_file['status'] = 'complete'


def _video_duration_seconds(filename: pathlib.Path) -> typing.Optional[int]:
    """Get the duration of a video file using ffprobe
    https://superuser.com/questions/650291/how-to-get-video-duration-in-seconds

    :param filename: file path to video
    :return: video duration in seconds
    """
    import subprocess

    def run(cli_args):
        if log.isEnabledFor(logging.INFO):
            import shlex
            cmd = ' '.join(shlex.quote(s) for s in cli_args)
            log.info('Calling %s', cmd)

        ffprobe = subprocess.run(
            cli_args,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            timeout=10,  # seconds
        )

        if ffprobe.returncode:
            import shlex
            cmd = ' '.join(shlex.quote(s) for s in cli_args)
            log.error('Error running %s: stopped with return code %i',
                      cmd, ffprobe.returncode)
            log.error('Output was: %s', ffprobe.stdout)
            return None

        try:
            return int(float(ffprobe.stdout))
        except ValueError as e:
            log.exception('ffprobe produced invalid number: %s', ffprobe.stdout)
            return None

    ffprobe_from_container_args = [
        current_app.config['BIN_FFPROBE'],
        '-v', 'error',
        '-show_entries', 'format=duration',
        '-of', 'default=noprint_wrappers=1:nokey=1',
        str(filename),
    ]

    ffprobe_from_stream_args = [
        current_app.config['BIN_FFPROBE'],
        '-v', 'error',
        '-hide_banner',
        '-select_streams', 'v:0',  # we only care about the first video stream
        '-show_entries', 'stream=duration',
        '-of', 'default=noprint_wrappers=1:nokey=1',
        str(filename),
    ]

    duration = run(ffprobe_from_stream_args) or \
               run(ffprobe_from_container_args) or \
               None
    return duration


def _video_size_pixels(filename: pathlib.Path) -> typing.Tuple[int, int]:
    """Figures out the size (in pixels) of the video file.

@@ -220,8 +281,10 @@ def _process_video(gcs,
    # by determining the video size here we already have this information in the file
    # document before Zencoder calls our notification URL. It also opens up possibilities
    # for other encoding backends that don't support this functionality.
    video_width, video_height = _video_size_pixels(pathlib.Path(local_file.name))
    video_path = pathlib.Path(local_file.name)
    video_width, video_height = _video_size_pixels(video_path)
    capped_video_width, capped_video_height = _video_cap_at_1080(video_width, video_height)
    video_duration = _video_duration_seconds(video_path)

    # Create variations
    root, _ = os.path.splitext(src_file['file_path'])

@@ -234,12 +297,13 @@ def _process_video(gcs,
        content_type='video/{}'.format(v),
        file_path='{}-{}.{}'.format(root, v, v),
        size='',
        duration=0,
        width=capped_video_width,
        height=capped_video_height,
        length=0,
        md5='',
    )
    if video_duration:
        file_variation['duration'] = video_duration
    # Append file variation. Originally mp4 and webm were the available options,
    # that's why we build a list.
    src_file['variations'].append(file_variation)
@@ -29,7 +29,6 @@ def latest_nodes(db_filter, projection, limit):
    proj = {
        '_created': 1,
        '_updated': 1,
        'user.full_name': 1,
        'project._id': 1,
        'project.url': 1,
        'project.name': 1,

@@ -70,6 +69,7 @@ def latest_assets():
        {'name': 1, 'node_type': 1,
         'parent': 1, 'picture': 1, 'properties.status': 1,
         'properties.content_type': 1,
         'properties.duration_seconds': 1,
         'permissions.world': 1},
        12)

@@ -80,7 +80,7 @@ def latest_assets():
def latest_comments():
    latest = latest_nodes({'node_type': 'comment',
                           'properties.status': 'published'},
                          {'parent': 1,
                          {'parent': 1, 'user.full_name': 1,
                           'properties.content': 1, 'node_type': 1,
                           'properties.status': 1,
                           'properties.is_reply': 1},
@@ -94,17 +94,10 @@ def generate_and_store_token(user_id, days=15, prefix=b'') -> dict:

    # Use 'xy' as altchars to prevent + and / characters from appearing.
    # We never have to b64decode the string anyway.
    token_bytes = prefix + base64.b64encode(random_bits, altchars=b'xy').strip(b'=')
    token = token_bytes.decode('ascii')
    token = prefix + base64.b64encode(random_bits, altchars=b'xy').strip(b'=')

    token_expiry = utcnow() + datetime.timedelta(days=days)
    token_data = store_token(user_id, token, token_expiry)

    # Include the token in the returned document so that it can be stored client-side,
    # in configuration, etc.
    token_data['token'] = token

    return token_data
    return store_token(user_id, token.decode('ascii'), token_expiry)
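A minimal sketch of the token encoding used above (the prefix and the number of random bytes are illustrative):

```python
import base64
import os

# altchars=b'xy' keeps '+' and '/' out of the token, and the '=' padding
# can be stripped because the token is never base64-decoded again.
random_bits = os.urandom(32)
token = b'SC' + base64.b64encode(random_bits, altchars=b'xy').strip(b'=')
print(token.decode('ascii'))
```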

def hash_password(password: str, salt: typing.Union[str, bytes]) -> str:
@@ -12,7 +12,7 @@ ATTACHMENT_SLUG_REGEX = r'[a-zA-Z0-9_\-]+'
attachments_embedded_schema = {
    'type': 'dict',
    # TODO: will be renamed to 'keyschema' in Cerberus 1.0
    'propertyschema': {
    'keyschema': {
        'type': 'string',
        'regex': '^%s$' % ATTACHMENT_SLUG_REGEX,
    },

@@ -40,6 +40,51 @@ attachments_embedded_schema = {
    },
}

# TODO (fsiddi) reference this schema in all node_types that allow ratings
ratings_embedded_schema = {
    'type': 'dict',
    # Total count of positive ratings (updated at every rating action)
    'schema': {
        'positive': {
            'type': 'integer',
        },
        # Total count of negative ratings (updated at every rating action)
        'negative': {
            'type': 'integer',
        },
        # Collection of ratings, keyed by user
        'ratings': {
            'type': 'list',
            'schema': {
                'type': 'dict',
                'schema': {
                    'user': {
                        'type': 'objectid',
                        'data_relation': {
                            'resource': 'users',
                            'field': '_id',
                            'embeddable': False
                        }
                    },
                    'is_positive': {
                        'type': 'boolean'
                    },
                    # Weight of the rating based on user rep and the context.
                    # Currently we have the following weights:
                    # - 1 auto null
                    # - 2 manual null
                    # - 3 auto valid
                    # - 4 manual valid
                    'weight': {
                        'type': 'integer'
                    }
                }
            }
        },
        'hot': {'type': 'float'},
    },
}
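An illustrative ratings sub-document that fits the schema above (the user ID is made up):

```python
from bson import ObjectId

ratings = {
    'positive': 1,  # running counters, updated at every rating action
    'negative': 0,
    'ratings': [
        {
            'user': ObjectId('5c06a2b5e1ac4d3a6b2f0001'),
            'is_positive': True,
            'weight': 4,  # "manual valid", per the weight table above
        },
    ],
    'hot': 0.37,  # hypothetical hotness score
}
```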
# Import after defining the common embedded schemas, to prevent dependency cycles.
from pillar.api.node_types.asset import node_type_asset
from pillar.api.node_types.blog import node_type_blog
@@ -24,6 +24,10 @@ node_type_asset = {
        'content_type': {
            'type': 'string'
        },
        # The duration of a video asset in seconds.
        'duration_seconds': {
            'type': 'integer'
        },
        # We point to the original file (and use it to extract any relevant
        # variation useful for our scope).
        'file': _file_embedded_schema,

@@ -58,6 +62,7 @@ node_type_asset = {
    },
    'form_schema': {
        'content_type': {'visible': False},
        'duration_seconds': {'visible': False},
        'order': {'visible': False},
        'tags': {'visible': False},
        'categories': {'visible': False},
@@ -7,7 +7,7 @@ node_type_comment = {
        'type': 'string',
        'minlength': 5,
        'required': True,
        'coerce': 'markdown',
        'validator': 'markdown',
    },
    '_content_html': {'type': 'string'},
    'status': {
@@ -3,7 +3,7 @@ node_type_group = {
    'description': 'Folder node type',
    'parent': ['group', 'project'],
    'dyn_schema': {
        # Used for sorting within the context of a group

        'order': {
            'type': 'integer'
        },

@@ -20,7 +20,8 @@ node_type_group = {
        'notes': {
            'type': 'string',
            'maxlength': 256,
        },
    }

    },
    'form_schema': {
        'url': {'visible': False},
@@ -9,7 +9,7 @@ node_type_post = {
        'minlength': 5,
        'maxlength': 90000,
        'required': True,
        'coerce': 'markdown',
        'validator': 'markdown',
    },
    '_content_html': {'type': 'string'},
    'status': {
@@ -1,58 +1,21 @@
import base64
import functools
import datetime
import logging
import urllib.parse

import pymongo.errors
import werkzeug.exceptions as wz_exceptions
from bson import ObjectId
from flask import current_app, Blueprint, request

from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
from pillar.api.file_storage_backends.gcs import update_file_name
from pillar.api.nodes import eve_hooks
from pillar.api.utils import str2id, jsonify
from pillar.api.utils.authorization import check_permissions, require_login
from pillar.web.utils import pretty_date

log = logging.getLogger(__name__)
blueprint = Blueprint('nodes_api', __name__)
ROLES_FOR_SHARING = {'subscriber', 'demo'}


def only_for_node_type_decorator(*required_node_type_names):
    """Returns a decorator that checks its first argument's node type.

    If the node type is not of the required node type, returns None,
    otherwise calls the wrapped function.

    >>> deco = only_for_node_type_decorator('comment')
    >>> @deco
    ... def handle_comment(node): pass

    >>> deco = only_for_node_type_decorator('comment', 'post')
    >>> @deco
    ... def handle_comment_or_post(node): pass

    """

    # Convert to a set for efficient 'x in required_node_type_names' queries.
    required_node_type_names = set(required_node_type_names)

    def only_for_node_type(wrapped):
        @functools.wraps(wrapped)
        def wrapper(node, *args, **kwargs):
            if node.get('node_type') not in required_node_type_names:
                return

            return wrapped(node, *args, **kwargs)

        return wrapper

    only_for_node_type.__doc__ = "Decorator, immediately returns when " \
                                 "the first argument is not of type %s." % required_node_type_names
    return only_for_node_type


@blueprint.route('/<node_id>/share', methods=['GET', 'POST'])
@require_login(require_roles=ROLES_FOR_SHARING)
def share_node(node_id):

@@ -85,7 +48,80 @@ def share_node(node_id):
    else:
        return '', 204

    return jsonify(short_link_info(short_code), status=status)
    return jsonify(eve_hooks.short_link_info(short_code), status=status)


@blueprint.route('/tagged/')
@blueprint.route('/tagged/<tag>')
def tagged(tag=''):
    """Return all tagged nodes of public projects as JSON."""
    from pillar.auth import current_user

    # We explicitly register the tagless endpoint to raise a 404, otherwise the PATCH
    # handler on /api/nodes/<node_id> will return a 405 Method Not Allowed.
    if not tag:
        raise wz_exceptions.NotFound()

    # Build the (cached) list of tagged nodes
    agg_list = _tagged(tag)

    for node in agg_list:
        if node['properties'].get('duration_seconds'):
            node['properties']['duration'] = datetime.timedelta(seconds=node['properties']['duration_seconds'])

        if node.get('_created') is not None:
            node['pretty_created'] = pretty_date(node['_created'])

    # If the user is anonymous, no more information is needed and we return
    if current_user.is_anonymous:
        return jsonify(agg_list)

    # If the user is authenticated, attach view_progress for video assets
    view_progress = current_user.nodes['view_progress']
    for node in agg_list:
        node_id = str(node['_id'])
        # View progress should be added only for nodes of type 'asset' and
        # with content_type 'video', only if the video was already in the watched
        # list for the current user.
        if node_id in view_progress:
            node['view_progress'] = view_progress[node_id]

    return jsonify(agg_list)


def _tagged(tag: str):
    """Fetch all public nodes with the given tag.

    This function is cached, see setup_app().
    """
    nodes_coll = current_app.db('nodes')
    agg = nodes_coll.aggregate([
        {'$match': {'properties.tags': tag,
                    '_deleted': {'$ne': True}}},

        # Only get nodes from public projects. This is done after matching the
        # tagged nodes, because most likely nobody else will be able to tag
        # nodes anyway.
        {'$lookup': {
            'from': 'projects',
            'localField': 'project',
            'foreignField': '_id',
            'as': '_project',
        }},
        {'$unwind': '$_project'},
        {'$match': {'_project.is_private': False}},
        {'$addFields': {
            'project._id': '$_project._id',
            'project.name': '$_project.name',
            'project.url': '$_project.url',
        }},

        # Don't return the entire project/file for each node.
        {'$project': {'_project': False}},
        {'$sort': {'_created': -1}}
    ])

    return list(agg)
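Given the routes above, a hypothetical client call would look like this (the host is made up, and the `/api/nodes` mount point is inferred from the comment about `/api/nodes/<node_id>`):

```python
import requests

# Fetch all publicly tagged nodes for one tag; anonymous access is enough.
resp = requests.get('https://cloud.example.com/api/nodes/tagged/animation')
resp.raise_for_status()
for node in resp.json():
    print(node['_id'], node.get('pretty_created'))
```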
|
||||
def generate_and_store_short_code(node):
|
||||
@@ -163,265 +199,35 @@ def create_short_code(node) -> str:
|
||||
return short_code
|
||||
|
||||
|
||||
def short_link_info(short_code):
|
||||
"""Returns the short link info in a dict."""
|
||||
|
||||
short_link = urllib.parse.urljoin(
|
||||
current_app.config['SHORT_LINK_BASE_URL'], short_code)
|
||||
|
||||
return {
|
||||
'short_code': short_code,
|
||||
'short_link': short_link,
|
||||
}


def before_replacing_node(item, original):
    check_permissions('nodes', original, 'PUT')
    update_file_name(item)


def after_replacing_node(item, original):
    """Push an update to the Algolia index when a node item is updated. If the
    project is private, prevent public indexing.
    """

    from pillar.celery import search_index_tasks as index

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': item['project']})
    if project.get('is_private', False):
        # Skip index updating and return
        return

    status = item['properties'].get('status', 'unpublished')
    node_id = str(item['_id'])

    if status == 'published':
        index.node_save.delay(node_id)
    else:
        index.node_delete.delay(node_id)


def before_inserting_nodes(items):
    """Before inserting a node in the collection we check if the user is allowed
    and we append the project id to it.
    """
    from pillar.auth import current_user

    nodes_collection = current_app.data.driver.db['nodes']

    def find_parent_project(node):
        """Recursive function that finds the ultimate parent of a node."""
        if node and 'parent' in node:
            parent = nodes_collection.find_one({'_id': node['parent']})
            return find_parent_project(parent)
        if node:
            return node
        else:
            return None

    for item in items:
        check_permissions('nodes', item, 'POST')
        if 'parent' in item and 'project' not in item:
            parent = nodes_collection.find_one({'_id': item['parent']})
            project = find_parent_project(parent)
            if project:
                item['project'] = project['_id']

        # Default the 'user' property to the current user.
        item.setdefault('user', current_user.user_id)


def after_inserting_nodes(items):
    for item in items:
        # Skip subscriptions for first level items (since the context is not a
        # node, but a project).
        # TODO: support should be added for mixed context
        if 'parent' not in item:
            return
        context_object_id = item['parent']
        if item['node_type'] == 'comment':
            nodes_collection = current_app.data.driver.db['nodes']
            parent = nodes_collection.find_one({'_id': item['parent']})
            # Always subscribe to the parent node
            activity_subscribe(item['user'], 'node', item['parent'])
            if parent['node_type'] == 'comment':
                # If the parent is a comment, we provide its own parent as
                # context. We do this in order to point the user to an asset
                # or group when viewing the notification.
                verb = 'replied'
                context_object_id = parent['parent']
                # Subscribe to the parent of the parent comment (post or group)
                activity_subscribe(item['user'], 'node', parent['parent'])
            else:
                activity_subscribe(item['user'], 'node', item['_id'])
                verb = 'commented'
        elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
            verb = 'posted'
            activity_subscribe(item['user'], 'node', item['_id'])
        else:
            # Don't automatically create activities for non-Pillar node types,
            # as we don't know what would be a suitable verb (among other things).
            continue

        activity_object_add(
            item['user'],
            verb,
            'node',
            item['_id'],
            'node',
            context_object_id
        )


def deduct_content_type(node_doc, original=None):
    """Deduct the content type from the attached file, if any."""

    if node_doc['node_type'] != 'asset':
        log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
        return

    node_id = node_doc.get('_id')
    try:
        file_id = ObjectId(node_doc['properties']['file'])
    except KeyError:
        if node_id is None:
            # Creation of a file-less node is allowed, but updates aren't.
            return
        log.warning('deduct_content_type: Asset without properties.file, rejecting.')
        raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')

    files = current_app.data.driver.db['files']
    file_doc = files.find_one({'_id': file_id},
                              {'content_type': 1})
    if not file_doc:
        log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
                    node_id, file_id)
        raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')

    # Guess the node content type from the file content type
    file_type = file_doc['content_type']
    if file_type.startswith('video/'):
        content_type = 'video'
    elif file_type.startswith('image/'):
        content_type = 'image'
    else:
        content_type = 'file'

    node_doc['properties']['content_type'] = content_type


def nodes_deduct_content_type(nodes):
    for node in nodes:
        deduct_content_type(node)


def before_returning_node(node):
    # Run validation process, since GET on nodes entry point is public
    check_permissions('nodes', node, 'GET', append_allowed_methods=True)

    # Embed short_link_info if the node has a short_code.
    short_code = node.get('short_code')
    if short_code:
        node['short_link'] = short_link_info(short_code)['short_link']


def before_returning_nodes(nodes):
    for node in nodes['_items']:
        before_returning_node(node)


def node_set_default_picture(node, original=None):
    """Uses the image of an image asset or colour map of texture node as picture."""

    if node.get('picture'):
        log.debug('Node %s already has a picture, not overriding', node.get('_id'))
        return

    node_type = node.get('node_type')
    props = node.get('properties', {})
    content = props.get('content_type')

    if node_type == 'asset' and content == 'image':
        image_file_id = props.get('file')
    elif node_type == 'texture':
        # Find the colour map, defaulting to the first image map available.
        image_file_id = None
        for image in props.get('files', []):
            if image_file_id is None or image.get('map_type') == 'color':
                image_file_id = image.get('file')
    else:
        log.debug('Not setting default picture on node type %s content type %s',
                  node_type, content)
        return

    if image_file_id is None:
        log.debug('Nothing to set the picture to.')
        return

    log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
    node['picture'] = image_file_id


def nodes_set_default_picture(nodes):
    for node in nodes:
        node_set_default_picture(node)


def before_deleting_node(node: dict):
    check_permissions('nodes', node, 'DELETE')


def after_deleting_node(item):
    from pillar.celery import search_index_tasks as index
    index.node_delete.delay(str(item['_id']))


only_for_textures = only_for_node_type_decorator('texture')


@only_for_textures
def texture_sort_files(node, original=None):
    """Sort files alphabetically by map type, with colour map first."""

    try:
        files = node['properties']['files']
    except KeyError:
        return

    # Sort the map types alphabetically, ensuring 'color' comes first.
    as_dict = {f['map_type']: f for f in files}
    types = sorted(as_dict.keys(), key=lambda k: '\0' if k == 'color' else k)
    node['properties']['files'] = [as_dict[map_type] for map_type in types]


def textures_sort_files(nodes):
    for node in nodes:
        texture_sort_files(node)


def setup_app(app, url_prefix):
    global _tagged

    cached = app.cache.memoize(timeout=300)
    _tagged = cached(_tagged)

    from . import patch
    patch.setup_app(app, url_prefix=url_prefix)

    app.on_fetched_item_nodes += before_returning_node
    app.on_fetched_resource_nodes += before_returning_nodes
    app.on_fetched_item_nodes += eve_hooks.before_returning_node
    app.on_fetched_resource_nodes += eve_hooks.before_returning_nodes

    app.on_replace_nodes += before_replacing_node
    app.on_replace_nodes += texture_sort_files
    app.on_replace_nodes += deduct_content_type
    app.on_replace_nodes += node_set_default_picture
    app.on_replaced_nodes += after_replacing_node
    app.on_replace_nodes += eve_hooks.before_replacing_node
    app.on_replace_nodes += eve_hooks.parse_markdown
    app.on_replace_nodes += eve_hooks.texture_sort_files
    app.on_replace_nodes += eve_hooks.deduct_content_type_and_duration
    app.on_replace_nodes += eve_hooks.node_set_default_picture
    app.on_replaced_nodes += eve_hooks.after_replacing_node

    app.on_insert_nodes += before_inserting_nodes
    app.on_insert_nodes += nodes_deduct_content_type
    app.on_insert_nodes += nodes_set_default_picture
    app.on_insert_nodes += textures_sort_files
    app.on_inserted_nodes += after_inserting_nodes
    app.on_insert_nodes += eve_hooks.before_inserting_nodes
    app.on_insert_nodes += eve_hooks.parse_markdowns
    app.on_insert_nodes += eve_hooks.nodes_deduct_content_type_and_duration
    app.on_insert_nodes += eve_hooks.nodes_set_default_picture
    app.on_insert_nodes += eve_hooks.textures_sort_files
    app.on_inserted_nodes += eve_hooks.after_inserting_nodes

    app.on_update_nodes += texture_sort_files
    app.on_update_nodes += eve_hooks.texture_sort_files

    app.on_delete_item_nodes += before_deleting_node
    app.on_deleted_item_nodes += after_deleting_node
    app.on_delete_item_nodes += eve_hooks.before_deleting_node
    app.on_deleted_item_nodes += eve_hooks.after_deleting_node

    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
@@ -6,6 +6,7 @@ from flask import current_app
import werkzeug.exceptions as wz_exceptions

from pillar.api.utils import authorization, authentication, jsonify
from pillar.api.utils.rating import confidence

from . import register_patch_handler

@@ -25,6 +26,13 @@ def patch_comment(node_id, patch):
    assert patch['op'] == 'edit', 'Invalid patch operation %s' % patch['op']
    result, node = edit_comment(user_id, node_id, patch)

    # Calculate and update confidence.
    rating_confidence = confidence(
        node['properties']['rating_positive'], node['properties']['rating_negative'])
    current_app.data.driver.db['nodes'].update_one(
        {'_id': node_id},
        {'$set': {'properties.confidence': rating_confidence}})

    return jsonify({'_status': 'OK',
                    'result': result,
                    'properties': node['properties']
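The body of `confidence` from `pillar.api.utils.rating` is not part of this diff. A common choice for rating confidence is the lower bound of the Wilson score interval; the sketch below assumes that approach and is purely illustrative, not the actual Pillar implementation:

```python
import math

def confidence(pos: int, neg: int) -> float:
    """Hypothetical sketch: lower bound of the Wilson score interval (z=1.96, ~95%)."""
    n = pos + neg
    if n == 0:
        return 0.0
    z = 1.96
    phat = pos / n
    # Wilson lower bound penalises small sample sizes, so a 2/2 rating scores
    # lower than a 200/200 rating even though both are 50% positive.
    return ((phat + z * z / (2 * n)
             - z * math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n))
            / (1 + z * z / n))
```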

374  pillar/api/nodes/eve_hooks.py  Normal file
@@ -0,0 +1,374 @@
import collections
import functools
import logging
import urllib.parse

from bson import ObjectId
from werkzeug import exceptions as wz_exceptions

from pillar import current_app
import pillar.markdown
from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.file_storage_backends.gcs import update_file_name
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
from pillar.api.utils import random_etag
from pillar.api.utils.authorization import check_permissions

log = logging.getLogger(__name__)


def before_returning_node(node):
    # Run validation process, since GET on nodes entry point is public
    check_permissions('nodes', node, 'GET', append_allowed_methods=True)

    # Embed short_link_info if the node has a short_code.
    short_code = node.get('short_code')
    if short_code:
        node['short_link'] = short_link_info(short_code)['short_link']


def before_returning_nodes(nodes):
    for node in nodes['_items']:
        before_returning_node(node)


def only_for_node_type_decorator(*required_node_type_names):
    """Returns a decorator that checks its first argument's node type.

    If the node type is not of the required node type, returns None,
    otherwise calls the wrapped function.

    >>> deco = only_for_node_type_decorator('comment')
    >>> @deco
    ... def handle_comment(node): pass

    >>> deco = only_for_node_type_decorator('comment', 'post')
    >>> @deco
    ... def handle_comment_or_post(node): pass

    """

    # Convert to a set for efficient 'x in required_node_type_names' queries.
    required_node_type_names = set(required_node_type_names)

    def only_for_node_type(wrapped):
        @functools.wraps(wrapped)
        def wrapper(node, *args, **kwargs):
            if node.get('node_type') not in required_node_type_names:
                return

            return wrapped(node, *args, **kwargs)

        return wrapper

    only_for_node_type.__doc__ = "Decorator, immediately returns when " \
                                 "the first argument is not of type %s." % required_node_type_names
    return only_for_node_type


def before_replacing_node(item, original):
    check_permissions('nodes', original, 'PUT')
    update_file_name(item)


def after_replacing_node(item, original):
    """Push an update to the Algolia index when a node item is updated. If the
    project is private, prevent public indexing.
    """

    from pillar.celery import search_index_tasks as index

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': item['project']})
    if project.get('is_private', False):
        # Skip index updating and return
        return

    status = item['properties'].get('status', 'unpublished')
    node_id = str(item['_id'])

    if status == 'published':
        index.node_save.delay(node_id)
    else:
        index.node_delete.delay(node_id)


def before_inserting_nodes(items):
    """Before inserting a node in the collection we check if the user is allowed
    and we append the project id to it.
    """
    from pillar.auth import current_user

    nodes_collection = current_app.data.driver.db['nodes']

    def find_parent_project(node):
        """Recursive function that finds the ultimate parent of a node."""
        if node and 'parent' in node:
            parent = nodes_collection.find_one({'_id': node['parent']})
            return find_parent_project(parent)
        if node:
            return node
        else:
            return None

    for item in items:
        check_permissions('nodes', item, 'POST')
        if 'parent' in item and 'project' not in item:
            parent = nodes_collection.find_one({'_id': item['parent']})
            project = find_parent_project(parent)
            if project:
                item['project'] = project['_id']

        # Default the 'user' property to the current user.
        item.setdefault('user', current_user.user_id)


def after_inserting_nodes(items):
    for item in items:
        # Skip subscriptions for first level items (since the context is not a
        # node, but a project).
        # TODO: support should be added for mixed context
        if 'parent' not in item:
            return
        context_object_id = item['parent']
        if item['node_type'] == 'comment':
            nodes_collection = current_app.data.driver.db['nodes']
            parent = nodes_collection.find_one({'_id': item['parent']})
            # Always subscribe to the parent node
            activity_subscribe(item['user'], 'node', item['parent'])
            if parent['node_type'] == 'comment':
                # If the parent is a comment, we provide its own parent as
                # context. We do this in order to point the user to an asset
                # or group when viewing the notification.
                verb = 'replied'
                context_object_id = parent['parent']
                # Subscribe to the parent of the parent comment (post or group)
                activity_subscribe(item['user'], 'node', parent['parent'])
            else:
                activity_subscribe(item['user'], 'node', item['_id'])
                verb = 'commented'
        elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
            verb = 'posted'
            activity_subscribe(item['user'], 'node', item['_id'])
        else:
            # Don't automatically create activities for non-Pillar node types,
            # as we don't know what would be a suitable verb (among other things).
            continue

        activity_object_add(
            item['user'],
            verb,
            'node',
            item['_id'],
            'node',
            context_object_id
        )


def deduct_content_type_and_duration(node_doc, original=None):
    """Deduct the content type from the attached file, if any."""

    if node_doc['node_type'] != 'asset':
        log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
        return

    node_id = node_doc.get('_id')
    try:
        file_id = ObjectId(node_doc['properties']['file'])
    except KeyError:
        if node_id is None:
            # Creation of a file-less node is allowed, but updates aren't.
            return
        log.warning('deduct_content_type: Asset without properties.file, rejecting.')
        raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')

    files = current_app.data.driver.db['files']
    file_doc = files.find_one({'_id': file_id},
                              {'content_type': 1,
                               'variations': 1})
    if not file_doc:
        log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
                    node_id, file_id)
        raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')

    # Guess the node content type from the file content type
    file_type = file_doc['content_type']
    if file_type.startswith('video/'):
        content_type = 'video'
    elif file_type.startswith('image/'):
        content_type = 'image'
    else:
        content_type = 'file'

    node_doc['properties']['content_type'] = content_type

    if content_type == 'video':
        duration = file_doc['variations'][0].get('duration')
        if duration:
            node_doc['properties']['duration_seconds'] = duration
        else:
            log.warning('Video file %s has no duration', file_id)


def nodes_deduct_content_type_and_duration(nodes):
    for node in nodes:
        deduct_content_type_and_duration(node)


def node_set_default_picture(node, original=None):
    """Uses the image of an image asset or colour map of texture node as picture."""

    if node.get('picture'):
        log.debug('Node %s already has a picture, not overriding', node.get('_id'))
        return

    node_type = node.get('node_type')
    props = node.get('properties', {})
    content = props.get('content_type')

    if node_type == 'asset' and content == 'image':
        image_file_id = props.get('file')
    elif node_type == 'texture':
        # Find the colour map, defaulting to the first image map available.
        image_file_id = None
        for image in props.get('files', []):
            if image_file_id is None or image.get('map_type') == 'color':
                image_file_id = image.get('file')
    else:
        log.debug('Not setting default picture on node type %s content type %s',
                  node_type, content)
        return

    if image_file_id is None:
        log.debug('Nothing to set the picture to.')
        return

    log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
    node['picture'] = image_file_id


def nodes_set_default_picture(nodes):
    for node in nodes:
        node_set_default_picture(node)


def before_deleting_node(node: dict):
    check_permissions('nodes', node, 'DELETE')
    remove_project_references(node)


def remove_project_references(node):
    project_id = node.get('project')
    if not project_id:
        return

    node_id = node['_id']
    log.info('Removing references to node %s from project %s', node_id, project_id)

    projects_col = current_app.db('projects')
    project = projects_col.find_one({'_id': project_id})
    updates = collections.defaultdict(dict)

    if project.get('header_node') == node_id:
        updates['$unset']['header_node'] = node_id

    project_reference_lists = ('nodes_blog', 'nodes_featured', 'nodes_latest')
    for list_name in project_reference_lists:
        references = project.get(list_name)
        if not references:
            continue
        try:
            references.remove(node_id)
        except ValueError:
            continue

        updates['$set'][list_name] = references

    if not updates:
        return

    updates['$set']['_etag'] = random_etag()
    result = projects_col.update_one({'_id': project_id}, updates)
    if result.modified_count != 1:
        log.warning('Removing references to node %s from project %s resulted in %d modified documents (expected 1)',
                    node_id, project_id, result.modified_count)
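For orientation, the `defaultdict` above builds an ordinary MongoDB update document. A hypothetical example of what `update_one()` might receive when the deleted node was both the project's header node and listed in `nodes_featured` (all values illustrative):

```python
updates = {
    '$unset': {'header_node': node_id},   # value is ignored by MongoDB's $unset
    '$set': {
        'nodes_featured': [],             # the reference list with the node removed
        '_etag': '8f2e...c1',             # fresh etag from random_etag()
    },
}
```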


def after_deleting_node(item):
    from pillar.celery import search_index_tasks as index
    index.node_delete.delay(str(item['_id']))


only_for_textures = only_for_node_type_decorator('texture')


@only_for_textures
def texture_sort_files(node, original=None):
    """Sort files alphabetically by map type, with colour map first."""

    try:
        files = node['properties']['files']
    except KeyError:
        return

    # Sort the map types alphabetically, ensuring 'color' comes first.
    as_dict = {f['map_type']: f for f in files}
    types = sorted(as_dict.keys(), key=lambda k: '\0' if k == 'color' else k)
    node['properties']['files'] = [as_dict[map_type] for map_type in types]


def textures_sort_files(nodes):
    for node in nodes:
        texture_sort_files(node)


def parse_markdown(node, original=None):
    import copy

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': node['project']}, {'node_types': 1})
    # Query node type directly using the key
    node_type = next(nt for nt in project['node_types']
                     if nt['name'] == node['node_type'])

    # Create a copy to not overwrite the actual schema.
    schema = copy.deepcopy(current_app.config['DOMAIN']['nodes']['schema'])
    schema['properties'] = node_type['dyn_schema']

    def find_markdown_fields(schema, node):
        """Find and process all markdown-validated fields."""
        for k, v in schema.items():
            if not isinstance(v, dict):
                continue

            if v.get('validator') == 'markdown':
                # If there is a match with the validator: markdown pair, assign the sibling
                # property (following the naming convention _<property>_html)
                # the processed value.
                if k in node:
                    html = pillar.markdown.markdown(node[k])
                    field_name = pillar.markdown.cache_field_name(k)
                    node[field_name] = html
            if isinstance(node, dict) and k in node:
                find_markdown_fields(v, node[k])

    find_markdown_fields(schema, node)

    return 'ok'
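A quick sketch of the naming convention, assuming `pillar.markdown.cache_field_name` implements the `_<property>_html` pattern mentioned in the comment above (the exact HTML output depends on the markdown renderer):

```python
node = {'description': '# Hello'}
# After parse_markdown() the processed HTML sits next to the source field,
# roughly like this:
# node == {'description': '# Hello', '_description_html': '<h1>Hello</h1>\n'}
```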


def parse_markdowns(items):
    for item in items:
        parse_markdown(item)


def short_link_info(short_code):
    """Returns the short link info in a dict."""

    short_link = urllib.parse.urljoin(
        current_app.config['SHORT_LINK_BASE_URL'], short_code)

    return {
        'short_code': short_code,
        'short_link': short_link,
    }
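Note that `urllib.parse.urljoin` only appends the short code when the base URL ends with a slash, so the `SHORT_LINK_BASE_URL` config value matters. A quick illustration (base URL hypothetical):

```python
import urllib.parse

# With a trailing slash the short code is appended:
urllib.parse.urljoin('https://blender.cloud/r/', 'abc123')  # -> 'https://blender.cloud/r/abc123'
# Without one, the last path segment is replaced instead:
urllib.parse.urljoin('https://blender.cloud/r', 'abc123')   # -> 'https://blender.cloud/abc123'
```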

@@ -1,7 +1,7 @@
"""Code for moving around nodes."""

import attr
import flask_pymongo.wrappers
import pymongo.database
from bson import ObjectId

from pillar import attrs_extra
@@ -10,7 +10,7 @@ import pillar.api.file_storage.moving

@attr.s
class NodeMover(object):
    db = attr.ib(validator=attr.validators.instance_of(flask_pymongo.wrappers.Database))
    db = attr.ib(validator=attr.validators.instance_of(pymongo.database.Database))
    skip_gcs = attr.ib(default=False, validator=attr.validators.instance_of(bool))
    _log = attrs_extra.log('%s.NodeMover' % __name__)

@@ -71,14 +71,19 @@ def before_delete_project(document):

def after_delete_project(project: dict):
    """Perform delete on the project's files too."""

    from werkzeug.exceptions import NotFound
    from eve.methods.delete import delete

    pid = project['_id']
    log.info('Project %s was deleted, also deleting its files.', pid)

    r, _, _, status = delete('files', {'project': pid})
    try:
        r, _, _, status = delete('files', {'project': pid})
    except NotFound:
        # There were no files, and that's fine.
        return
    if status != 204:
        # Will never happen because bloody Eve always returns 204 or raises an exception.
        log.warning('Unable to delete files of project %s: %s', pid, r)

@@ -81,6 +81,7 @@ class Node(es.DocType):
        fields={
            'id': es.Keyword(),
            'name': es.Keyword(),
            'url': es.Keyword(),
        }
    )

@@ -153,18 +154,21 @@ def create_doc_from_node_data(node_to_index: dict) -> typing.Optional[Node]:
    doc.objectID = str(node_to_index['objectID'])
    doc.node_type = node_to_index['node_type']
    doc.name = node_to_index['name']
    doc.description = node_to_index.get('description')
    doc.user.id = str(node_to_index['user']['_id'])
    doc.user.name = node_to_index['user']['full_name']
    doc.project.id = str(node_to_index['project']['_id'])
    doc.project.name = node_to_index['project']['name']
    doc.project.url = node_to_index['project']['url']

    if node_to_index['node_type'] == 'asset':
        doc.media = node_to_index['media']

    doc.picture = node_to_index.get('picture')
    doc.picture = str(node_to_index.get('picture'))

    doc.tags = node_to_index.get('tags')
    doc.license_notes = node_to_index.get('license_notes')
    doc.is_free = node_to_index.get('is_free')

    doc.created_at = node_to_index['created']
    doc.updated_at = node_to_index['updated']

@@ -3,16 +3,18 @@ import logging
import typing

from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search, Q
from elasticsearch_dsl import Search, Q, MultiSearch
from elasticsearch_dsl.query import Query

from pillar import current_app

log = logging.getLogger(__name__)

NODE_AGG_TERMS = ['node_type', 'media', 'tags', 'is_free']
BOOLEAN_TERMS = ['is_free']
NODE_AGG_TERMS = ['node_type', 'media', 'tags', *BOOLEAN_TERMS]
USER_AGG_TERMS = ['roles', ]
ITEMS_PER_PAGE = 10
USER_SOURCE_INCLUDE = ['full_name', 'objectID', 'username']

# Will be set in setup_app()
client: Elasticsearch = None
@@ -27,26 +29,25 @@ def add_aggs_to_search(search, agg_terms):
        search.aggs.bucket(term, 'terms', field=term)


def make_must(must: list, terms: dict) -> list:
def make_filter(must: list, terms: dict) -> list:
    """ Given term parameters append must queries to the must list """

    for field, value in terms.items():
        if value:
            must.append({'match': {field: value}})
        if value not in (None, ''):
            must.append({'term': {field: value}})

    return must


def nested_bool(must: list, should: list, terms: dict, *, index_alias: str) -> Search:
def nested_bool(filters: list, should: list, terms: dict, *, index_alias: str) -> Search:
    """
    Create a nested bool, where the aggregation selection is a must.

    :param index_alias: 'USER' or 'NODE', see ELASTIC_INDICES config.
    """
    must = make_must(must, terms)
    filters = make_filter(filters, terms)
    bool_query = Q('bool', should=should)
    must.append(bool_query)
    bool_query = Q('bool', must=must)
    bool_query = Q('bool', must=bool_query, filter=filters)

    index = current_app.config['ELASTIC_INDICES'][index_alias]
    search = Search(using=client, index=index)
@@ -55,12 +56,34 @@ def nested_bool(must: list, should: list, terms: dict, *, index_alias: str) -> S
    return search
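The change above moves the term refinements out of the scoring `must` clause into the bool query's `filter` context, where exact `term` clauses neither affect relevance nor get analyzed, and can be cached by Elasticsearch. A sketch of the query body `nested_bool()` now produces (field values hypothetical):

```python
query = {
    'bool': {
        # Free-text 'should' clauses stay in scoring context...
        'must': {'bool': {'should': [{'match': {'name': 'spring'}}]}},
        # ...while facet refinements are exact-match filters.
        'filter': [
            {'term': {'node_type': 'asset'}},
            {'term': {'is_free': True}},
        ],
    }
}
```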


def do_multi_node_search(queries: typing.List[dict]) -> typing.List[dict]:
    """
    Given user query input and term refinements,
    search for public published nodes.
    """
    search = create_multi_node_search(queries)
    return _execute_multi(search)


def do_node_search(query: str, terms: dict, page: int, project_id: str='') -> dict:
    """
    Given user query input and term refinements,
    search for public published nodes.
    """
    search = create_node_search(query, terms, page, project_id)
    return _execute(search)


def create_multi_node_search(queries: typing.List[dict]) -> MultiSearch:
    search = MultiSearch(using=client)
    for q in queries:
        search = search.add(create_node_search(**q))

    return search


def create_node_search(query: str, terms: dict, page: int, project_id: str='') -> Search:
    terms = _transform_terms(terms)
    should = [
        Q('match', name=query),

@@ -71,52 +94,30 @@ def do_node_search(query: str, terms: dict, page: int, project_id: str='') -> di
        Q('term', media=query),
        Q('term', tags=query),
    ]

    must = []
    filters = []
    if project_id:
        must.append({'term': {'project.id': project_id}})
        filters.append({'term': {'project.id': project_id}})
    if not query:
        should = []

    search = nested_bool(must, should, terms, index_alias='NODE')
    search = nested_bool(filters, should, terms, index_alias='NODE')
    if not query:
        search = search.sort('-created_at')
    add_aggs_to_search(search, NODE_AGG_TERMS)
    search = paginate(search, page)

    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(search.to_dict(), indent=4))

    response = search.execute()

    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(response.to_dict(), indent=4))

    return response.to_dict()
    return search


def do_user_search(query: str, terms: dict, page: int) -> dict:
    """ Return user objects represented in elasticsearch result dict. """

    must, should = _common_user_search(query)
    search = nested_bool(must, should, terms, index_alias='USER')
    add_aggs_to_search(search, USER_AGG_TERMS)
    search = paginate(search, page)

    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(search.to_dict(), indent=4))

    response = search.execute()

    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(response.to_dict(), indent=4))

    return response.to_dict()
    search = create_user_search(query, terms, page)
    return _execute(search)


def _common_user_search(query: str) -> (typing.List[Query], typing.List[Query]):
    """Construct (must,shoud) for regular + admin user search."""
    """Construct (filter,should) for regular + admin user search."""
    if not query:
        return [], []

@@ -144,8 +145,31 @@ def do_user_search_admin(query: str, terms: dict, page: int) -> dict:
    search all user fields and provide aggregation information
    """

    must, should = _common_user_search(query)
    search = create_user_admin_search(query, terms, page)
    return _execute(search)


def _execute(search: Search) -> dict:
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(search.to_dict(), indent=4))
    resp = search.execute()
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(resp.to_dict(), indent=4))
    return resp.to_dict()


def _execute_multi(search: typing.List[Search]) -> typing.List[dict]:
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(search.to_dict(), indent=4))
    resp = search.execute()
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(resp.to_dict(), indent=4))
    return [r.to_dict() for r in resp]


def create_user_admin_search(query: str, terms: dict, page: int) -> Search:
    terms = _transform_terms(terms)
    filters, should = _common_user_search(query)
    if query:
        # We most likely got an id field. We should find it.
        if len(query) == len('563aca02c379cf0005e8e17d'):
@@ -155,26 +179,34 @@ def do_user_search_admin(query: str, terms: dict, page: int) -> dict:
                'boost': 100,  # how much more it counts for the score
            }
        }})

    search = nested_bool(must, should, terms, index_alias='USER')
    search = nested_bool(filters, should, terms, index_alias='USER')
    add_aggs_to_search(search, USER_AGG_TERMS)
    search = paginate(search, page)
    return search

    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(search.to_dict(), indent=4))

    response = search.execute()

    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(response.to_dict(), indent=4))

    return response.to_dict()
def create_user_search(query: str, terms: dict, page: int) -> Search:
    search = create_user_admin_search(query, terms, page)
    return search.source(include=USER_SOURCE_INCLUDE)


def paginate(search: Search, page_idx: int) -> Search:
    return search[page_idx * ITEMS_PER_PAGE:(page_idx + 1) * ITEMS_PER_PAGE]


def _transform_terms(terms: dict) -> dict:
    """
    Ugly hack! Elastic uses 1/0 for boolean values in its aggregate response,
    but expects true/false in queries.
    """
    transformed = terms.copy()
    for t in BOOLEAN_TERMS:
        orig = transformed.get(t)
        if orig in ('1', '0'):
            transformed[t] = bool(int(orig))
    return transformed
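A quick illustration of the boolean round-trip fix:

```python
_transform_terms({'is_free': '1', 'media': 'video'})
# -> {'is_free': True, 'media': 'video'}
```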


def setup_app(app):
    global client

@@ -18,7 +18,7 @@ TERMS = [
]


def _term_filters() -> dict:
def _term_filters(args) -> dict:
    """
    Check if the frontend wants to filter stuff
    on specific fields AKA facets,
@@ -26,35 +26,53 @@ def _term_filters() -> dict:
    return mapping with term field name
    and provided user term value
    """
    return {term: request.args.get(term, '') for term in TERMS}
    return {term: args.get(term, '') for term in TERMS}


def _page_index() -> int:
def _page_index(page) -> int:
    """Return the page index from the query string."""
    try:
        page_idx = int(request.args.get('page') or '0')
        page_idx = int(page)
    except (TypeError, ValueError):
        log.info('invalid page number %r received', request.args.get('page'))
        raise wz_exceptions.BadRequest()
    return page_idx


@blueprint_search.route('/')
@blueprint_search.route('/', methods=['GET'])
def search_nodes():
    searchword = request.args.get('q', '')
    project_id = request.args.get('project', '')
    terms = _term_filters()
    page_idx = _page_index()
    terms = _term_filters(request.args)
    page_idx = _page_index(request.args.get('page', 0))

    result = queries.do_node_search(searchword, terms, page_idx, project_id)
    return jsonify(result)


@blueprint_search.route('/multisearch', methods=['GET'])
def multi_search_nodes():
    import json
    if len(request.args) != 1:
        log.info(f'Expected 1 argument, received {len(request.args)}')

    json_obj = json.loads([a for a in request.args][0])
    q = []
    for row in json_obj:
        q.append({
            'query': row.get('q', ''),
            'project_id': row.get('project', ''),
            'terms': _term_filters(row),
            'page': _page_index(row.get('page', 0))
        })

    result = queries.do_multi_node_search(q)
    return jsonify(result)
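The multisearch endpoint uses an unusual scheme: the whole list of sub-queries is JSON-encoded and sent as the *name* of the single query-string argument, which `[a for a in request.args][0]` then picks up. A sketch of calling it (mount point and values hypothetical):

```python
import json
import requests

payload = json.dumps([
    {'q': 'spring', 'project': '', 'page': 0},
    {'q': 'rig', 'project': '563aca02c379cf0005e8e17d', 'page': 0},
])
resp = requests.get('https://example.com/api/newsearch/multisearch',
                    params={payload: ''})  # JSON document as the key, empty value
results = resp.json()  # one result dict per sub-query
```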

@blueprint_search.route('/user')
def search_user():
    searchword = request.args.get('q', '')
    terms = _term_filters()
    page_idx = _page_index()
    terms = _term_filters(request.args)
    page_idx = _page_index(request.args.get('page', 0))
    # result is the raw elasticsearch output.
    # we need to filter fields in case of user objects.

@@ -65,27 +83,6 @@ def search_user():
        resp.status_code = 500
        return resp

    # filter sensitive stuff
    # we only need: objectID, full_name, username
    hits = result.get('hits', {})

    new_hits = []

    for hit in hits.get('hits'):
        source = hit['_source']
        single_hit = {
            '_source': {
                'objectID': source.get('objectID'),
                'username': source.get('username'),
                'full_name': source.get('full_name'),
            }
        }

        new_hits.append(single_hit)

    # replace search result with safe subset
    result['hits']['hits'] = new_hits

    return jsonify(result)


@@ -97,8 +94,8 @@ def search_user_admin():
    """

    searchword = request.args.get('q', '')
    terms = _term_filters()
    page_idx = _page_index()
    terms = _term_filters(request.args)
    page_idx = _page_index(request.args.get('page', 0))

    try:
        result = queries.do_user_search_admin(searchword, terms, page_idx)

373  pillar/api/timeline.py  Normal file
@@ -0,0 +1,373 @@
import itertools
import typing
from datetime import datetime
from operator import itemgetter

import attr
import bson
import pymongo
from flask import Blueprint, current_app, request, url_for

import pillar
from pillar import shortcodes
from pillar.api.utils import jsonify, pretty_duration, str2id

blueprint = Blueprint('timeline', __name__)


@attr.s(auto_attribs=True)
class TimelineDO:
    groups: typing.List['GroupDO'] = []
    continue_from: typing.Optional[float] = None


@attr.s(auto_attribs=True)
class GroupDO:
    label: typing.Optional[str] = None
    url: typing.Optional[str] = None
    items: typing.Dict = {}
    groups: typing.Iterable['GroupDO'] = []


class SearchHelper:
    def __init__(self, nbr_of_weeks: int, continue_from: typing.Optional[datetime],
                 project_ids: typing.List[bson.ObjectId], sort_direction: str):
        self._nbr_of_weeks = nbr_of_weeks
        self._continue_from = continue_from
        self._project_ids = project_ids
        self.sort_direction = sort_direction

    def _match(self, continue_from: typing.Optional[datetime]) -> dict:
        created = {}
        if continue_from:
            if self.sort_direction == 'desc':
                created = {'_created': {'$lt': continue_from}}
            else:
                created = {'_created': {'$gt': continue_from}}
        return {'_deleted': {'$ne': True},
                'node_type': {'$in': ['asset', 'post']},
                'project': {'$in': self._project_ids},
                **created,
                }

    def raw_weeks_from_mongo(self) -> pymongo.collection.Collection:
        direction = pymongo.DESCENDING if self.sort_direction == 'desc' else pymongo.ASCENDING
        nodes_coll = current_app.db('nodes')
        return nodes_coll.aggregate([
            {'$match': self._match(self._continue_from)},
            {'$lookup': {"from": "projects",
                         "localField": "project",
                         "foreignField": "_id",
                         "as": "project"}},
            {'$unwind': {'path': "$project"}},
            {'$lookup': {"from": "users",
                         "localField": "user",
                         "foreignField": "_id",
                         "as": "user"}},
            {'$unwind': {'path': "$user"}},
            {'$project': {
                '_created': 1,
                'project._id': 1,
                'project.url': 1,
                'project.name': 1,
                'user._id': 1,
                'user.full_name': 1,
                'name': 1,
                'node_type': 1,
                'picture': 1,
                'properties': 1,
                'permissions': 1,
            }},
            {'$group': {
                '_id': {'year': {'$isoWeekYear': '$_created'},
                        'week': {'$isoWeek': '$_created'}},
                'nodes': {'$push': '$$ROOT'}
            }},
            {'$sort': {'_id.year': direction,
                       '_id.week': direction}},
            {'$limit': self._nbr_of_weeks}
        ])

    def has_more(self, continue_from: datetime) -> bool:
        nodes_coll = current_app.db('nodes')
        result = nodes_coll.count(self._match(continue_from))
        return bool(result)


class Grouper:
    @classmethod
    def label(cls, node):
        return None

    @classmethod
    def url(cls, node):
        return None

    @classmethod
    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
        raise NotImplementedError()

    @classmethod
    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
        raise NotImplementedError()


class ProjectGrouper(Grouper):
    @classmethod
    def label(cls, project: dict):
        return project['name']

    @classmethod
    def url(cls, project: dict):
        return url_for('projects.view', project_url=project['url'])

    @classmethod
    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
        return itemgetter('project')

    @classmethod
    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
        return lambda node: node['project']['_id']


class UserGrouper(Grouper):
    @classmethod
    def label(cls, user):
        return user['full_name']

    @classmethod
    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
        return itemgetter('user')

    @classmethod
    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
        return lambda node: node['user']['_id']


class TimeLineBuilder:
    def __init__(self, search_helper: SearchHelper, grouper: typing.Type[Grouper]):
        self.search_helper = search_helper
        self.grouper = grouper
        self.continue_from = None

    def build(self) -> TimelineDO:
        raw_weeks = self.search_helper.raw_weeks_from_mongo()
        clean_weeks = (self.create_week_group(week) for week in raw_weeks)

        return TimelineDO(
            groups=list(clean_weeks),
            continue_from=self.continue_from.timestamp() if self.search_helper.has_more(self.continue_from) else None
        )

    def create_week_group(self, week: dict) -> GroupDO:
        nodes = week['nodes']
        nodes.sort(key=itemgetter('_created'), reverse=True)
        self.update_continue_from(nodes)
        groups = self.create_groups(nodes)

        return GroupDO(
            label=f'Week {week["_id"]["week"]}, {week["_id"]["year"]}',
            groups=groups
        )

    def create_groups(self, nodes: typing.List[dict]) -> typing.List[GroupDO]:
        self.sort_nodes(nodes)  # groupby assumes that the list is sorted
        nodes_grouped = itertools.groupby(nodes, self.grouper.group_key())
        groups = (self.clean_group(grouped_by, group) for grouped_by, group in nodes_grouped)
        groups_sorted = sorted(groups, key=self.group_row_sorter, reverse=True)
        return groups_sorted

    def sort_nodes(self, nodes: typing.List[dict]):
        nodes.sort(key=itemgetter('node_type'))
        nodes.sort(key=self.grouper.sort_key())

    def update_continue_from(self, sorted_nodes: typing.List[dict]):
        if self.search_helper.sort_direction == 'desc':
            first_created = sorted_nodes[-1]['_created']
            candidate = self.continue_from or first_created
            self.continue_from = min(candidate, first_created)
        else:
            last_created = sorted_nodes[0]['_created']
            candidate = self.continue_from or last_created
            self.continue_from = max(candidate, last_created)

    def clean_group(self, grouped_by: typing.Any, group: typing.Iterable[dict]) -> GroupDO:
        items = self.create_items(group)
        return GroupDO(
            label=self.grouper.label(grouped_by),
            url=self.grouper.url(grouped_by),
            items=items
        )

    def create_items(self, group) -> typing.List[dict]:
        by_node_type = itertools.groupby(group, key=itemgetter('node_type'))
        items = {}
        for node_type, nodes in by_node_type:
            items[node_type] = [self.node_prettyfy(n) for n in nodes]
        return items

    @classmethod
    def node_prettyfy(cls, node: dict) -> dict:
        duration_seconds = node['properties'].get('duration_seconds')
        if duration_seconds is not None:
            node['properties']['duration'] = pretty_duration(duration_seconds)
        if node['node_type'] == 'post':
            html = _get_markdowned_html(node['properties'], 'content')
            html = shortcodes.render_commented(html, context=node['properties'])
            node['properties']['pretty_content'] = html
        return node

    @classmethod
    def group_row_sorter(cls, row: GroupDO) -> typing.Tuple[datetime, datetime]:
        '''
        Groups that contain posts are more interesting, and are therefore put higher up.

        :param row:
        :return: tuple with newest post date and newest asset date
        '''
        def newest_created(nodes: typing.List[dict]) -> datetime:
            if nodes:
                return nodes[0]['_created']
            return datetime.fromtimestamp(0, tz=bson.tz_util.utc)
        newest_post_date = newest_created(row.items.get('post'))
        newest_asset_date = newest_created(row.items.get('asset'))
        return newest_post_date, newest_asset_date


def _public_project_ids() -> typing.List[bson.ObjectId]:
    """Returns a list of ObjectIDs of public projects.

    Memoized in setup_app().
    """

    proj_coll = current_app.db('projects')
    result = proj_coll.find({'is_private': False}, {'_id': 1})
    return [p['_id'] for p in result]


def _get_markdowned_html(document: dict, field_name: str) -> str:
    cache_field_name = pillar.markdown.cache_field_name(field_name)
    html = document.get(cache_field_name)
    if html is None:
        markdown_src = document.get(field_name) or ''
        html = pillar.markdown.markdown(markdown_src)
    return html


@blueprint.route('/', methods=['GET'])
def global_timeline():
    continue_from_str = request.args.get('from')
    continue_from = parse_continue_from(continue_from_str)
    nbr_of_weeks_str = request.args.get('weeksToLoad')
    nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
    sort_direction = request.args.get('dir', 'desc')
    return _global_timeline(continue_from, nbr_of_weeks, sort_direction)


@blueprint.route('/p/<string(length=24):pid_path>', methods=['GET'])
def project_timeline(pid_path: str):
    continue_from_str = request.args.get('from')
    continue_from = parse_continue_from(continue_from_str)
    nbr_of_weeks_str = request.args.get('weeksToLoad')
    nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
    sort_direction = request.args.get('dir', 'desc')
    pid = str2id(pid_path)
    return _project_timeline(continue_from, nbr_of_weeks, sort_direction, pid)


def parse_continue_from(from_arg) -> typing.Optional[datetime]:
    try:
        from_float = float(from_arg)
    except (TypeError, ValueError):
        return None
    return datetime.fromtimestamp(from_float, tz=bson.tz_util.utc)


def parse_nbr_of_weeks(weeks_to_load: str) -> int:
    try:
        return int(weeks_to_load)
    except (TypeError, ValueError):
        return 3
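A sketch of paging through the timeline from a client, assuming the blueprint is mounted at `/api/timeline` (hypothetical here). `from` is the `continue_from` timestamp returned by the previous page:

```python
import requests

resp = requests.get('https://example.com/api/timeline',
                    params={'weeksToLoad': 2, 'dir': 'desc', 'from': 1528793942.0})
page = resp.json()
next_cursor = page.get('continue_from')  # absent when there is nothing older to load
```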


def _global_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction: str):
    """Returns an aggregated view of what has happened on the site.
    Memoized in setup_app().

    :param continue_from: Python utc timestamp where to begin aggregation

    :param nbr_of_weeks: Number of weeks to return

    Example output:
    {
        groups: [{
            label: 'Week 32',
            groups: [{
                label: 'Spring',
                url: '/p/spring',
                items: {
                    post: [blogPostDoc, blogPostDoc],
                    asset: [assetDoc, assetDoc]
                },
                groups: ...
            }]
        }],
        continue_from: 123456.2  // python timestamp
    }
    """
    builder = TimeLineBuilder(
        SearchHelper(nbr_of_weeks, continue_from, _public_project_ids(), sort_direction),
        ProjectGrouper
    )
    return jsonify_timeline(builder.build())


def jsonify_timeline(timeline: TimelineDO):
    return jsonify(
        attr.asdict(timeline,
                    recurse=True,
                    filter=lambda att, value: value is not None)
    )


def _project_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction, pid: bson.ObjectId):
    """Returns an aggregated view of what has happened on the site.
    Memoized in setup_app().

    :param continue_from: Python utc timestamp where to begin aggregation

    :param nbr_of_weeks: Number of weeks to return

    Example output:
    {
        groups: [{
            label: 'Week 32',
            groups: [{
                label: 'Tobias Johansson',
                items: {
                    post: [blogPostDoc, blogPostDoc],
                    asset: [assetDoc, assetDoc]
                },
                groups: ...
            }]
        }],
        continue_from: 123456.2  // python timestamp
    }
    """
    builder = TimeLineBuilder(
        SearchHelper(nbr_of_weeks, continue_from, [pid], sort_direction),
        UserGrouper
    )
    return jsonify_timeline(builder.build())


def setup_app(app, url_prefix):
    global _public_project_ids
    global _global_timeline
    global _project_timeline

    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
    cached = app.cache.cached(timeout=3600)
    _public_project_ids = cached(_public_project_ids)
    memoize = app.cache.memoize(timeout=60)
    _global_timeline = memoize(_global_timeline)
    _project_timeline = memoize(_project_timeline)
@@ -142,7 +142,7 @@ def after_fetching_user(user):
        return

    # Remove all fields except public ones.
    public_fields = {'full_name', 'username', 'email', 'extension_props_public'}
    public_fields = {'full_name', 'username', 'email', 'extension_props_public', 'badges'}
    for field in list(user.keys()):
        if field not in public_fields:
            del user[field]
@@ -1,9 +1,11 @@
import logging

from eve.methods.get import get
from flask import Blueprint
from flask import Blueprint, request
import werkzeug.exceptions as wz_exceptions

from pillar.api.utils import jsonify
from pillar import current_app
from pillar.api import utils
from pillar.api.utils.authorization import require_login
from pillar.auth import current_user

@@ -15,7 +17,128 @@ blueprint_api = Blueprint('users_api', __name__)
@require_login()
def my_info():
    eve_resp, _, _, status, _ = get('users', {'_id': current_user.user_id})
    resp = jsonify(eve_resp['_items'][0], status=status)
    resp = utils.jsonify(eve_resp['_items'][0], status=status)
    return resp


@blueprint_api.route('/video/<video_id>/progress')
@require_login()
def get_video_progress(video_id: str):
    """Return video progress information.

    Either a `204 No Content` is returned (no information stored),
    or a `200 Ok` with JSON from Eve's 'users' schema, from the key
    video.view_progress.<video_id>.
    """

    # Validation of the video ID; raises a BadRequest when it's not an ObjectID.
    # This isn't strictly necessary, but it makes this function behave symmetrically
    # to the set_video_progress() function.
    utils.str2id(video_id)

    users_coll = current_app.db('users')
    user_doc = users_coll.find_one(current_user.user_id, projection={'nodes.view_progress': True})
    try:
        progress = user_doc['nodes']['view_progress'][video_id]
    except KeyError:
        return '', 204
    if not progress:
        return '', 204

    return utils.jsonify(progress)


@blueprint_api.route('/video/<video_id>/progress', methods=['POST'])
@require_login()
def set_video_progress(video_id: str):
    """Save progress information about a certain video.

    Expected parameters:
    - progress_in_sec: float number of seconds
    - progress_in_perc: integer percentage of video watched (interval [0-100])
    """
    my_log = log.getChild('set_video_progress')
    my_log.debug('Setting video progress for user %r video %r', current_user.user_id, video_id)

    # Constructing this response requires an active app, and thus can't be done on module load.
    no_video_response = utils.jsonify({'_message': 'No such video'}, status=404)

    try:
        progress_in_sec = float(request.form['progress_in_sec'])
        progress_in_perc = int(request.form['progress_in_perc'])
    except KeyError as ex:
        my_log.debug('Missing POST field in request: %s', ex)
        raise wz_exceptions.BadRequest(f'missing a form field')
    except ValueError as ex:
        my_log.debug('Invalid value for POST field in request: %s', ex)
        raise wz_exceptions.BadRequest(f'Invalid value for field: {ex}')

    users_coll = current_app.db('users')
    nodes_coll = current_app.db('nodes')

    # First check whether this is actually an existing video
    video_oid = utils.str2id(video_id)
    video_doc = nodes_coll.find_one(video_oid, projection={
        'node_type': True,
        'properties.content_type': True,
        'properties.file': True,
    })
    if not video_doc:
        my_log.debug('Node %r not found, unable to set progress for user %r',
                     video_oid, current_user.user_id)
        return no_video_response

    try:
        is_video = (video_doc['node_type'] == 'asset'
                    and video_doc['properties']['content_type'] == 'video')
    except KeyError:
        is_video = False

    if not is_video:
        my_log.info('Node %r is not a video, unable to set progress for user %r',
                    video_oid, current_user.user_id)
        # There is no video found at this URL, so act as if it doesn't even exist.
        return no_video_response

    # Compute the progress
    percent = min(100, max(0, progress_in_perc))
    progress = {
        'progress_in_sec': progress_in_sec,
        'progress_in_percent': percent,
        'last_watched': utils.utcnow(),
    }

    # After watching a certain percentage of the video, we consider it 'done'
    #
    #                     Total     Credit start   Total  Credit  Percent
    #                     HH:MM:SS  HH:MM:SS       sec    sec     of duration
    # Sintel              00:14:48  00:12:24       888    744     83.78%
    # Tears of Steel      00:12:14  00:09:49       734    589     80.25%
    # Cosmos Laundro      00:12:10  00:10:05       730    605     82.88%
    # Agent 327           00:03:51  00:03:26       231    206     89.18%
    # Caminandes 3        00:02:30  00:02:18       150    138     92.00%
    # Glass Half          00:03:13  00:02:52       193    172     89.12%
    # Big Buck Bunny      00:09:56  00:08:11       596    491     82.38%
    # Elephant’s Drea     00:10:54  00:09:25       654    565     86.39%
    #
    # Median                                                      85.09%
    # Average                                                     85.75%
    #
    # For training videos, marking it done at 85% of the video may be a bit
    # early, since those probably won't have (long) credits. This is why we
    # stick to 90% here.
    if percent >= 90:
        progress['done'] = True

    # Setting each property individually prevents us from overwriting any
    # existing {done: true} fields.
    updates = {f'nodes.view_progress.{video_id}.{k}': v
               for k, v in progress.items()}
    result = users_coll.update_one({'_id': current_user.user_id},
                                   {'$set': updates})

    if result.matched_count == 0:
        my_log.error('Current user %r could not be updated', current_user.user_id)
        raise wz_exceptions.InternalServerError('Unable to find logged-in user')

    return '', 204
|
||||
|
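# Illustrative client-side sketch of storing progress (the host, URL prefix
# and token are assumptions); the fields are form-encoded, as request.form
# expects them:
import requests

resp = requests.post(
    'https://cloud.example.com/api/nodes/video/5672beecc0261b2005ed1a33/progress',
    data={'progress_in_sec': 744.0, 'progress_in_perc': 92},
    headers={'Authorization': 'Bearer <token>'})
assert resp.status_code == 204  # success is an empty 204 No Content
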
@@ -57,6 +57,18 @@ def remove_private_keys(document):
    return doc_copy


def pretty_duration(seconds):
    if seconds is None:
        return ''
    seconds = round(seconds)
    hours, seconds = divmod(seconds, 3600)
    minutes, seconds = divmod(seconds, 60)
    if hours > 0:
        return f'{hours:02}:{minutes:02}:{seconds:02}'
    else:
        return f'{minutes:02}:{seconds:02}'

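# Quick sanity checks for pretty_duration() (illustrative, not in the diff):
assert pretty_duration(None) == ''
assert pretty_duration(75) == '01:15'       # below an hour: MM:SS
assert pretty_duration(4000) == '01:06:40'  # an hour or more: HH:MM:SS
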
class PillarJSONEncoder(json.JSONEncoder):
    """JSON encoder with support for Pillar resources."""

@@ -64,6 +76,9 @@ class PillarJSONEncoder(json.JSONEncoder):
        if isinstance(obj, datetime.datetime):
            return obj.strftime(RFC1123_DATE_FORMAT)

        if isinstance(obj, datetime.timedelta):
            return pretty_duration(obj.total_seconds())

        if isinstance(obj, bson.ObjectId):
            return str(obj)

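# With the timedelta branch above, durations serialise as pretty strings.
# Illustrative use with the standard json module:
import datetime
import json

json.dumps({'duration': datetime.timedelta(seconds=4000)}, cls=PillarJSONEncoder)
# -> '{"duration": "01:06:40"}'
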
@@ -245,4 +260,10 @@ def random_etag() -> str:


def utcnow() -> datetime.datetime:
    return datetime.datetime.now(tz=bson.tz_util.utc)
    """Construct timezone-aware 'now' in UTC with millisecond precision."""
    now = datetime.datetime.now(tz=bson.tz_util.utc)

    # MongoDB stores in millisecond precision, so truncate the microseconds.
    # This way the returned datetime can be round-tripped via MongoDB and stay the same.
    trunc_now = now.replace(microsecond=now.microsecond - (now.microsecond % 1000))
    return trunc_now

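# Why the truncation matters (illustrative): MongoDB only keeps milliseconds,
# so a microsecond-precise datetime would not survive a save/load round-trip.
import datetime
import bson.tz_util

dt = datetime.datetime(2018, 7, 1, 12, 0, 0, 123456, tzinfo=bson.tz_util.utc)
trunc = dt.replace(microsecond=dt.microsecond - (dt.microsecond % 1000))
assert trunc.microsecond == 123000
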
@@ -13,7 +13,7 @@ import logging
import typing

import bson
from flask import g, current_app
from flask import g, current_app, session
from flask import request
from werkzeug import exceptions as wz_exceptions

@@ -103,7 +103,7 @@ def find_user_in_db(user_info: dict, provider='blender-id') -> dict:
    return db_user


def validate_token(*, force=False):
def validate_token(*, force=False) -> bool:
    """Validate the token provided in the request and populate the current_user
    flask.g object, so that permissions and access to a resource can be defined
    from it.

@@ -115,7 +115,7 @@ def validate_token(*, force=False):
    :returns: True iff the user is logged in with a valid Blender ID token.
    """

    from pillar.auth import AnonymousUser
    import pillar.auth

    # Trust a pre-existing g.current_user
    if not force:
@@ -133,16 +133,22 @@ def validate_token(*, force=False):
        oauth_subclient = ''
    else:
        # Check the session; the user might be logged in through Flask-Login.
        from pillar import auth

        token = auth.get_blender_id_oauth_token()
        # The user has a logged-in session; trust it only if this request passes a CSRF check.
        # FIXME(Sybren): we should stop saving the token as 'user_id' in the session.
        token = session.get('user_id')
        if token:
            log.debug('skipping token check because current user already has a session')
            current_app.csrf.protect()
        else:
            token = pillar.auth.get_blender_id_oauth_token()
        oauth_subclient = None

    if not token:
        # If no authorization headers are provided, we are getting a request
        # from a non-logged-in user. Proceed accordingly.
        log.debug('No authentication headers, so not logged in.')
        g.current_user = AnonymousUser()
        g.current_user = pillar.auth.AnonymousUser()
        return False

    return validate_this_token(token, oauth_subclient) is not None
@@ -183,7 +189,7 @@ def validate_this_token(token, oauth_subclient=None):
        return None

    g.current_user = UserClass.construct(token, db_user)
    user_authenticated.send(None)
    user_authenticated.send(g.current_user)

    return db_user

@@ -194,7 +200,7 @@ def remove_token(token: str):
    tokens_coll = current_app.db('tokens')
    token_hashed = hash_auth_token(token)

    # TODO: remove matching on unhashed tokens once all tokens have been hashed.
    # TODO: remove matching on hashed tokens once all hashed tokens have expired.
    lookup = {'$or': [{'token': token}, {'token_hashed': token_hashed}]}
    del_res = tokens_coll.delete_many(lookup)
    log.debug('Removed token %r, matched %d documents', token, del_res.deleted_count)
@@ -206,7 +212,7 @@ def find_token(token, is_subclient_token=False, **extra_filters):
    tokens_coll = current_app.db('tokens')
    token_hashed = hash_auth_token(token)

    # TODO: remove matching on unhashed tokens once all tokens have been hashed.
    # TODO: remove matching on hashed tokens once all hashed tokens have expired.
    lookup = {'$or': [{'token': token}, {'token_hashed': token_hashed}],
              'is_subclient_token': True if is_subclient_token else {'$in': [False, None]},
              'expire_time': {"$gt": utcnow()}}
@@ -229,8 +235,14 @@ def hash_auth_token(token: str) -> str:
    return base64.b64encode(digest).decode('ascii')


def store_token(user_id, token: str, token_expiry, oauth_subclient_id=False,
                org_roles: typing.Set[str] = frozenset()):
def store_token(user_id,
                token: str,
                token_expiry,
                oauth_subclient_id=False,
                *,
                org_roles: typing.Set[str] = frozenset(),
                oauth_scopes: typing.Optional[typing.List[str]] = None,
                ):
    """Stores an authentication token.

    :returns: the token document from MongoDB

@@ -240,13 +252,15 @@ def store_token(user_id, token: str, token_expiry, oauth_subclient_id=False,

    token_data = {
        'user': user_id,
        'token_hashed': hash_auth_token(token),
        'token': token,
        'expire_time': token_expiry,
    }
    if oauth_subclient_id:
        token_data['is_subclient_token'] = True
    if org_roles:
        token_data['org_roles'] = sorted(org_roles)
    if oauth_scopes:
        token_data['oauth_scopes'] = oauth_scopes

    r, _, _, status = current_app.post_internal('tokens', token_data)

@@ -1,5 +1,6 @@
import logging
import functools
import typing

from bson import ObjectId
from flask import g
@@ -12,8 +13,9 @@ CHECK_PERMISSIONS_IMPLEMENTED_FOR = {'projects', 'nodes', 'flamenco_jobs'}
log = logging.getLogger(__name__)


def check_permissions(collection_name, resource, method, append_allowed_methods=False,
                      check_node_type=None):
def check_permissions(collection_name: str, resource: dict, method: str,
                      append_allowed_methods=False,
                      check_node_type: typing.Optional[str] = None):
    """Check user permissions to access a node. We look up node permissions from
    world to groups to users and match them with the computed user permissions.
    If there is no match, we raise 403.
@@ -93,8 +95,9 @@ def compute_allowed_methods(collection_name, resource, check_node_type=None):
    return allowed_methods


def has_permissions(collection_name, resource, method, append_allowed_methods=False,
                    check_node_type=None):
def has_permissions(collection_name: str, resource: dict, method: str,
                    append_allowed_methods=False,
                    check_node_type: typing.Optional[str] = None):
    """Check user permissions to access a node. We look up node permissions from
    world to groups to users and match them with the computed user permissions.

pillar/api/utils/rating.py | 87 lines (new file)
@@ -0,0 +1,87 @@
|
||||
# These functions come from Reddit
|
||||
# https://github.com/reddit/reddit/blob/master/r2/r2/lib/db/_sorts.pyx
|
||||
|
||||
# Additional resources
|
||||
# http://www.redditblog.com/2009/10/reddits-new-comment-sorting-system.html
|
||||
# http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
|
||||
# http://amix.dk/blog/post/19588
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from math import log
|
||||
from math import sqrt
|
||||
|
||||
epoch = datetime(1970, 1, 1, 0, 0, 0, 0, timezone.utc)
|
||||
|
||||
|
||||
def epoch_seconds(date):
|
||||
"""Returns the number of seconds from the epoch to date."""
|
||||
td = date - epoch
|
||||
return td.days * 86400 + td.seconds + (float(td.microseconds) / 1000000)
|
||||
|
||||
|
||||
def score(ups, downs):
|
||||
return ups - downs
|
||||
|
||||
|
||||
def hot(ups, downs, date):
|
||||
"""The hot formula. Reddit's hot ranking uses the logarithm function to
|
||||
weight the first votes higher than the rest.
|
||||
The first 10 upvotes have the same weight as the next 100 upvotes which
|
||||
have the same weight as the next 1000, etc.
|
||||
|
||||
Dillo authors: we modified the formula to give more weight to negative
|
||||
votes when an entry is controversial.
|
||||
|
||||
TODO: make this function more dynamic so that different defaults can be
|
||||
specified depending on the item that is being rated.
|
||||
"""
|
||||
|
||||
s = score(ups, downs)
|
||||
order = log(max(abs(s), 1), 10)
|
||||
sign = 1 if s > 0 else -1 if s < 0 else 0
|
||||
seconds = epoch_seconds(date) - 1134028003
|
||||
base_hot = round(sign * order + seconds / 45000, 7)
|
||||
|
||||
if downs > 1:
|
||||
rating_delta = 100 * (downs - ups) / downs
|
||||
if rating_delta < 25:
|
||||
# The post is controversial
|
||||
return base_hot
|
||||
base_hot = base_hot - (downs * 6)
|
||||
|
||||
return base_hot
|
||||
|
||||
|
||||
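# Illustrative calls to hot() (votes and date made up). The extra downvote
# penalty only applies when 100 * (downs - ups) / downs >= 25:
from datetime import datetime, timezone

d = datetime(2018, 7, 1, tzinfo=timezone.utc)
print(hot(10, 8, d))  # delta is -25: no penalty, plain Reddit-style hotness
print(hot(3, 8, d))   # delta is 62.5: hotness reduced by downs * 6 = 48
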
def _confidence(ups, downs):
    n = ups + downs

    if n == 0:
        return 0

    z = 1.0  # 1.0 = 85% confidence, 1.6 = 95%
    phat = float(ups) / n
    # Lower bound of the Wilson score confidence interval for a Bernoulli parameter.
    return (phat + z*z/(2*n) - z*sqrt((phat*(1-phat) + z*z/(4*n))/n)) / (1 + z*z/n)


def confidence(ups, downs):
    if ups + downs == 0:
        return 0
    else:
        return _confidence(ups, downs)


def update_hot(document):
    """Update the hotness of a document, given its current ratings.

    We expect the document to implement the ratings_embedded_schema in
    a 'ratings' property.
    """

    dt = document['_created']
    dt = dt.replace(tzinfo=timezone.utc)

    document['properties']['ratings']['hot'] = hot(
        document['properties']['ratings']['positive'],
        document['properties']['ratings']['negative'],
        dt,
    )
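# Minimal sketch of update_hot() on a node-shaped dict (values made up):
from datetime import datetime

doc = {'_created': datetime(2018, 7, 1),
       'properties': {'ratings': {'positive': 5, 'negative': 1, 'hot': 0.0}}}
update_hot(doc)
print(doc['properties']['ratings']['hot'])
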
@@ -12,7 +12,10 @@ from werkzeug.local import LocalProxy

from pillar import current_app

# The sender is the user that was just authenticated.
user_authenticated = blinker.Signal('Sent whenever a user was authenticated')
user_logged_in = blinker.Signal('Sent whenever a user logged in on the web')

log = logging.getLogger(__name__)

# Mapping from user role to capabilities obtained by users with that role.
@@ -38,6 +41,8 @@ class UserClass(flask_login.UserMixin):
        self.groups: typing.List[str] = []  # NOTE: these are stringified object IDs.
        self.group_ids: typing.List[bson.ObjectId] = []
        self.capabilities: typing.Set[str] = set()
        self.nodes: dict = {}  # see the 'nodes' key in eve_settings.py::user_schema.
        self.badges_html: str = ''

        # Lazily evaluated
        self._has_organizations: typing.Optional[bool] = None
@@ -56,6 +61,12 @@ class UserClass(flask_login.UserMixin):
        user.email = db_user.get('email') or ''
        user.username = db_user.get('username') or ''
        user.full_name = db_user.get('full_name') or ''
        user.badges_html = db_user.get('badges', {}).get('html') or ''

        # Be a little more specific than just db_user['nodes'].
        user.nodes = {
            'view_progress': db_user.get('nodes', {}).get('view_progress', {}),
        }

        # Derived properties
        user.objectid = str(user.user_id or '')
@@ -210,9 +221,15 @@ def login_user(oauth_token: str, *, load_from_db=False):
        user = _load_user(oauth_token)
    else:
        user = UserClass(oauth_token)
    login_user_object(user)


def login_user_object(user: UserClass):
    """Log in the given user."""
    flask_login.login_user(user, remember=True)
    g.current_user = user
    user_authenticated.send(None)
    user_authenticated.send(user)
    user_logged_in.send(user)


def logout_user():
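# Sketch: any module can now subscribe to the new signal; the receiver name
# is made up, and the sender is the freshly logged-in UserClass instance.
from pillar import auth

@auth.user_logged_in.connect
def _on_web_login(sender, **kwargs):
    log.info('web login for user %s', sender.user_id)
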
@@ -1,8 +1,9 @@
import abc
import attr
import json
import logging
import typing

import attr
from rauth import OAuth2Service
from flask import current_app, url_for, request, redirect, session, Response

@@ -15,6 +16,8 @@ class OAuthUserResponse:

    id = attr.ib(validator=attr.validators.instance_of(str))
    email = attr.ib(validator=attr.validators.instance_of(str))
    access_token = attr.ib(validator=attr.validators.instance_of(str))
    scopes: typing.List[str] = attr.ib(validator=attr.validators.instance_of(list))


class OAuthError(Exception):
@@ -127,25 +130,26 @@ class OAuthSignIn(metaclass=abc.ABCMeta):

class BlenderIdSignIn(OAuthSignIn):
    provider_name = 'blender-id'
    scopes = ['email', 'badge']

    def __init__(self):
        from urllib.parse import urljoin
        super().__init__()

        base_url = current_app.config['OAUTH_CREDENTIALS']['blender-id'].get(
            'base_url', 'https://www.blender.org/id/')
        base_url = current_app.config['BLENDER_ID_ENDPOINT']

        self.service = OAuth2Service(
            name='blender-id',
            client_id=self.consumer_id,
            client_secret=self.consumer_secret,
            authorize_url='%soauth/authorize' % base_url,
            access_token_url='%soauth/token' % base_url,
            base_url='%sapi/' % base_url
            authorize_url=urljoin(base_url, 'oauth/authorize'),
            access_token_url=urljoin(base_url, 'oauth/token'),
            base_url=urljoin(base_url, 'api/'),
        )

    def authorize(self):
        return redirect(self.service.get_authorize_url(
            scope='email',
            scope=' '.join(self.scopes),
            response_type='code',
            redirect_uri=self.get_callback_url())
        )
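# Why urljoin() instead of '%s' formatting (illustrative): for a root endpoint
# URL it yields the same result whether or not the configured value has a
# trailing slash.
from urllib.parse import urljoin

assert urljoin('http://id.local:8000/', 'oauth/token') == 'http://id.local:8000/oauth/token'
assert urljoin('http://id.local:8000', 'oauth/token') == 'http://id.local:8000/oauth/token'
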
@@ -159,7 +163,11 @@ class BlenderIdSignIn(OAuthSignIn):

        session['blender_id_oauth_token'] = access_token
        me = oauth_session.get('user').json()
        return OAuthUserResponse(str(me['id']), me['email'])

        # Blender ID doesn't tell us which scopes were granted by the user, so
        # for now assume we got all the scopes we requested.
        # (see https://github.com/jazzband/django-oauth-toolkit/issues/644)
        return OAuthUserResponse(str(me['id']), me['email'], access_token, self.scopes)


class FacebookSignIn(OAuthSignIn):
@@ -189,7 +197,7 @@ class FacebookSignIn(OAuthSignIn):
        me = oauth_session.get('me?fields=id,email').json()
        # TODO: handle the case where the user chooses not to disclose an email;
        # see https://developers.facebook.com/docs/graph-api/reference/user/
        return OAuthUserResponse(me['id'], me.get('email'))
        return OAuthUserResponse(me['id'], me.get('email'), '', [])


class GoogleSignIn(OAuthSignIn):
@@ -217,4 +225,4 @@ class GoogleSignIn(OAuthSignIn):
        oauth_session = self.make_oauth_session()

        me = oauth_session.get('userinfo').json()
        return OAuthUserResponse(str(me['id']), me['email'])
        return OAuthUserResponse(str(me['id']), me['email'], '', [])
pillar/badge_sync.py | 266 lines (new file)
@@ -0,0 +1,266 @@
|
||||
import collections
|
||||
import datetime
|
||||
import logging
|
||||
import typing
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import bson
|
||||
import requests
|
||||
|
||||
from pillar import current_app, auth
|
||||
from pillar.api.utils import utcnow
|
||||
|
||||
SyncUser = collections.namedtuple('SyncUser', 'user_id token bid_user_id')
|
||||
BadgeHTML = collections.namedtuple('BadgeHTML', 'html expires')
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StopRefreshing(Exception):
|
||||
"""Indicates that Blender ID is having problems.
|
||||
|
||||
Further badge refreshes should be put on hold to avoid bludgeoning
|
||||
a suffering Blender ID.
|
||||
"""
|
||||
|
||||
|
||||
def find_user_to_sync(user_id: bson.ObjectId) -> typing.Optional[SyncUser]:
|
||||
"""Return user information for syncing badges for a specific user.
|
||||
|
||||
Returns None if the user cannot be synced (no 'badge' scope on a token,
|
||||
or no Blender ID user_id known).
|
||||
"""
|
||||
my_log = log.getChild('refresh_single_user')
|
||||
|
||||
now = utcnow()
|
||||
tokens_coll = current_app.db('tokens')
|
||||
users_coll = current_app.db('users')
|
||||
|
||||
token_info = tokens_coll.find_one({
|
||||
'user': user_id,
|
||||
'token': {'$exists': True},
|
||||
'oauth_scopes': 'badge',
|
||||
'expire_time': {'$gt': now},
|
||||
})
|
||||
if not token_info:
|
||||
my_log.debug('No token with scope "badge" for user %s', user_id)
|
||||
return None
|
||||
|
||||
user_info = users_coll.find_one({'_id': user_id})
|
||||
# TODO(Sybren): do this filtering in the MongoDB query:
|
||||
bid_user_ids = [auth_info.get('user_id')
|
||||
for auth_info in user_info.get('auth', [])
|
||||
if auth_info.get('provider', '') == 'blender-id' and auth_info.get('user_id')]
|
||||
if not bid_user_ids:
|
||||
my_log.debug('No Blender ID user_id for user %s', user_id)
|
||||
return None
|
||||
|
||||
bid_user_id = bid_user_ids[0]
|
||||
return SyncUser(user_id=user_id, token=token_info['token'], bid_user_id=bid_user_id)
|
||||
|
||||
|
||||
def find_users_to_sync() -> typing.Iterable[SyncUser]:
|
||||
"""Return user information of syncable users with badges."""
|
||||
|
||||
now = utcnow()
|
||||
tokens_coll = current_app.db('tokens')
|
||||
cursor = tokens_coll.aggregate([
|
||||
# Find all users who have a 'badge' scope in their OAuth token.
|
||||
{'$match': {
|
||||
'token': {'$exists': True},
|
||||
'oauth_scopes': 'badge',
|
||||
'expire_time': {'$gt': now},
|
||||
# TODO(Sybren): save real token expiry time but keep checking tokens hourly when they are used!
|
||||
}},
|
||||
{'$lookup': {
|
||||
'from': 'users',
|
||||
'localField': 'user',
|
||||
'foreignField': '_id',
|
||||
'as': 'user'
|
||||
}},
|
||||
|
||||
# Prevent 'user' from being an array.
|
||||
{'$unwind': {'path': '$user'}},
|
||||
|
||||
# Get the Blender ID user ID only.
|
||||
{'$unwind': {'path': '$user.auth'}},
|
||||
{'$match': {'user.auth.provider': 'blender-id'}},
|
||||
|
||||
# Only select those users whose badge doesn't exist or has expired.
|
||||
{'$match': {
|
||||
'user.badges.expires': {'$not': {'$gt': now}}
|
||||
}},
|
||||
|
||||
# Make sure that the badges that expire last are also refreshed last.
|
||||
{'$sort': {'user.badges.expires': 1}},
|
||||
|
||||
# Reduce the document to the info we're after.
|
||||
{'$project': {
|
||||
'token': True,
|
||||
'user._id': True,
|
||||
'user.auth.user_id': True,
|
||||
}},
|
||||
])
|
||||
|
||||
log.debug('Aggregating tokens and users')
|
||||
for user_info in cursor:
|
||||
log.debug('User %s has badges %s',
|
||||
user_info['user']['_id'], user_info['user'].get('badges'))
|
||||
yield SyncUser(
|
||||
user_id=user_info['user']['_id'],
|
||||
token=user_info['token'],
|
||||
bid_user_id=user_info['user']['auth']['user_id'])
|
||||
|
||||
|
||||
def fetch_badge_html(session: requests.Session, user: SyncUser, size: str) \
|
||||
-> str:
|
||||
"""Fetch a Blender ID badge for this user.
|
||||
|
||||
:param session:
|
||||
:param user:
|
||||
:param size: Size indication for the badge images, see the Blender ID
|
||||
documentation/code. As of this writing valid sizes are {'s', 'm', 'l'}.
|
||||
"""
|
||||
my_log = log.getChild('fetch_badge_html')
|
||||
|
||||
blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
|
||||
url = urljoin(blender_id_endpoint, f'api/badges/{user.bid_user_id}/html/{size}')
|
||||
|
||||
my_log.debug('Fetching badge HTML at %s for user %s', url, user.user_id)
|
||||
try:
|
||||
resp = session.get(url, headers={'Authorization': f'Bearer {user.token}'})
|
||||
except requests.ConnectionError as ex:
|
||||
my_log.warning('Unable to connect to Blender ID at %s: %s', url, ex)
|
||||
raise StopRefreshing()
|
||||
|
||||
if resp.status_code == 204:
|
||||
my_log.debug('No badges for user %s', user.user_id)
|
||||
return ''
|
||||
if resp.status_code == 403:
|
||||
# TODO(Sybren): this indicates the token is invalid, so we could just as well delete it.
|
||||
my_log.warning('Tried fetching %s for user %s but received a 403: %s',
|
||||
url, user.user_id, resp.text)
|
||||
return ''
|
||||
if resp.status_code == 400:
|
||||
my_log.warning('Blender ID did not accept our GET request at %s for user %s: %s',
|
||||
url, user.user_id, resp.text)
|
||||
return ''
|
||||
if resp.status_code == 500:
|
||||
my_log.warning('Blender ID returned an internal server error on %s for user %s, '
|
||||
'aborting all badge refreshes: %s', url, user.user_id, resp.text)
|
||||
raise StopRefreshing()
|
||||
if resp.status_code == 404:
|
||||
my_log.warning('Blender ID has no user %s for our user %s', user.bid_user_id, user.user_id)
|
||||
return ''
|
||||
resp.raise_for_status()
|
||||
return resp.text
|
||||
|
||||
|
||||
def refresh_all_badges(only_user_id: typing.Optional[bson.ObjectId] = None, *,
                       dry_run=False,
                       timelimit: datetime.timedelta):
    """Re-fetch all badges for all users, except when already refreshed recently.

    :param only_user_id: Only refresh this user. This is expected to be used
        sparingly during manual maintenance / debugging sessions only. It does
        fetch all users to refresh, and in Python code skips all except the
        given one.
    :param dry_run: if True the changes are described in the log, but not performed.
    :param timelimit: Refreshing will stop after this time. This allows cron(-like)
        jobs to run without overlapping, even when the number of badges to refresh
        becomes larger than possible within the period of the cron job.
    """
    my_log = log.getChild('refresh_all_badges')

    # Test the config before we start looping over the world.
    badge_expiry = badge_expiry_config()
    if not badge_expiry or not isinstance(badge_expiry, datetime.timedelta):
        raise ValueError('BLENDER_ID_BADGE_EXPIRY not configured properly, should be a timedelta')

    session = _get_requests_session()
    deadline = utcnow() + timelimit

    num_updates = 0
    for user_info in find_users_to_sync():
        if utcnow() > deadline:
            my_log.info('Stopping badge refresh because the timelimit %s (H:MM:SS) was hit.',
                        timelimit)
            break

        if only_user_id and user_info.user_id != only_user_id:
            my_log.debug('Skipping user %s', user_info.user_id)
            continue
        try:
            badge_html = fetch_badge_html(session, user_info, 's')
        except StopRefreshing:
            my_log.error('Blender ID has internal problems, stopping badge refreshing at user %s',
                         user_info)
            break

        num_updates += 1
        update_badges(user_info, badge_html, badge_expiry, dry_run=dry_run)
    my_log.info('Updated badges of %d users%s', num_updates, ' (dry-run)' if dry_run else '')

def _get_requests_session() -> requests.Session:
    from requests.adapters import HTTPAdapter
    session = requests.Session()
    session.mount('https://', HTTPAdapter(max_retries=5))
    return session

def refresh_single_user(user_id: bson.ObjectId):
    """Refresh badges for a single user."""
    my_log = log.getChild('refresh_single_user')

    badge_expiry = badge_expiry_config()
    if not badge_expiry:
        my_log.warning('Skipping badge fetching, BLENDER_ID_BADGE_EXPIRY not configured')
        return

    my_log.debug('Fetching badges for user %s', user_id)
    session = _get_requests_session()
    user_info = find_user_to_sync(user_id)
    if not user_info:
        return

    try:
        badge_html = fetch_badge_html(session, user_info, 's')
    except StopRefreshing:
        my_log.error('Blender ID has internal problems, stopping badge refreshing at user %s',
                     user_info)
        return

    update_badges(user_info, badge_html, badge_expiry, dry_run=False)
    my_log.info('Updated badges of user %s', user_id)

def update_badges(user_info: SyncUser, badge_html: str, badge_expiry: datetime.timedelta,
                  *, dry_run: bool):
    my_log = log.getChild('update_badges')
    users_coll = current_app.db('users')

    update = {'badges': {
        'html': badge_html,
        'expires': utcnow() + badge_expiry,
    }}
    my_log.info('Updating badges HTML for Blender ID %s, user %s',
                user_info.bid_user_id, user_info.user_id)

    if dry_run:
        return

    result = users_coll.update_one({'_id': user_info.user_id},
                                   {'$set': update})
    if result.matched_count != 1:
        my_log.warning('Unable to update badges for user %s', user_info.user_id)


def badge_expiry_config() -> datetime.timedelta:
    return current_app.config.get('BLENDER_ID_BADGE_EXPIRY')


@auth.user_logged_in.connect
def sync_badge_upon_login(sender: auth.UserClass, **kwargs):
    """Auto-sync badges when a user logs in."""

    log.info('Refreshing badge of %s because they logged in', sender.user_id)
    refresh_single_user(sender.user_id)
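# End-to-end sketch of a manual badge refresh using this module; user_oid is
# a placeholder for an existing user's ObjectId.
from pillar import badge_sync

session = badge_sync._get_requests_session()
sync_user = badge_sync.find_user_to_sync(user_oid)
if sync_user:
    html = badge_sync.fetch_badge_html(session, sync_user, 's')
    badge_sync.update_badges(sync_user, html, badge_sync.badge_expiry_config(),
                             dry_run=True)
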
@@ -1,38 +0,0 @@
import logging

from algoliasearch.helpers import AlgoliaException

log = logging.getLogger(__name__)


def push_updated_user(user_to_index: dict):
    """Push an update to the Algolia index when a user item is updated."""

    from pillar.api.utils.algolia import index_user_save

    try:
        index_user_save(user_to_index)
    except AlgoliaException as ex:
        log.warning(
            'Unable to push user info to Algolia for user "%s", id=%s; %s',  # noqa
            user_to_index.get('username'),
            user_to_index.get('objectID'), ex)


def index_node_save(node_to_index: dict):
    from pillar.api.utils import algolia

    try:
        algolia.index_node_save(node_to_index)
    except AlgoliaException as ex:
        log.warning(
            'Unable to push node info to Algolia for node %s; %s', node_to_index, ex)  # noqa


def index_node_delete(delete_id: str):
    from pillar.api.utils import algolia

    try:
        algolia.index_node_delete(delete_id)
    except AlgoliaException as ex:
        log.warning('Unable to delete node info from Algolia for node %s; %s', delete_id, ex)  # noqa
pillar/celery/badges.py | 20 lines (new file)
@@ -0,0 +1,20 @@
|
||||
"""Badge HTML synchronisation.
|
||||
|
||||
Note that this module can only be imported when an application context is
|
||||
active. Best to late-import this in the functions where it's needed.
|
||||
"""
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
from pillar import current_app, badge_sync
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@current_app.celery.task(ignore_result=True)
|
||||
def sync_badges_for_users(timelimit_seconds: int):
|
||||
"""Synchronises Blender ID badges for the most-urgent users."""
|
||||
|
||||
timelimit = datetime.timedelta(seconds=timelimit_seconds)
|
||||
log.info('Refreshing badges, timelimit is %s (H:MM:SS)', timelimit)
|
||||
badge_sync.refresh_all_badges(timelimit=timelimit)
|
@@ -1,4 +1,6 @@
import logging

import bleach
from bson import ObjectId

from pillar import current_app
@@ -10,7 +12,7 @@ from pillar.api.search import algolia_indexing
log = logging.getLogger(__name__)


INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}
INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri', 'post'}


SEARCH_BACKENDS = {
@@ -28,34 +30,6 @@ def _get_node_from_id(node_id: str):
    return node


def _handle_picture(node: dict, to_index: dict):
    """Add picture URL in-place to the to-be-indexed node."""

    picture_id = node.get('picture')
    if not picture_id:
        return

    files_collection = current_app.data.driver.db['files']
    lookup = {'_id': ObjectId(picture_id)}
    picture = files_collection.find_one(lookup)

    for item in picture.get('variations', []):
        if item['size'] != 't':
            continue

        # Not all files have a project...
        pid = picture.get('project')
        if pid:
            link = generate_link(picture['backend'],
                                 item['file_path'],
                                 str(pid),
                                 is_public=True)
        else:
            link = item['link']
        to_index['picture'] = link
        break


def prepare_node_data(node_id: str, node: dict=None) -> dict:
    """Given a node id or a node document, return an indexable version of it.

@@ -86,25 +60,30 @@ def prepare_node_data(node_id: str, node: dict=None) -> dict:
    users_collection = current_app.data.driver.db['users']
    user = users_collection.find_one({'_id': ObjectId(node['user'])})

    clean_description = bleach.clean(node.get('_description_html') or '', strip=True)
    if not clean_description and node['node_type'] == 'post':
        clean_description = bleach.clean(node['properties'].get('_content_html') or '', strip=True)

    to_index = {
        'objectID': node['_id'],
        'name': node['name'],
        'project': {
            '_id': project['_id'],
            'name': project['name']
            'name': project['name'],
            'url': project['url'],
        },
        'created': node['_created'],
        'updated': node['_updated'],
        'node_type': node['node_type'],
        'picture': node.get('picture') or '',
        'user': {
            '_id': user['_id'],
            'full_name': user['full_name']
        },
        'description': node.get('description'),
        'description': clean_description or None,
        'is_free': False
    }

    _handle_picture(node, to_index)

    # If the node has world permissions, compute the Free permission
    if 'world' in node.get('permissions', {}):
        if 'GET' in node['permissions']['world']:
@@ -13,6 +13,7 @@ from pillar.cli.maintenance import manager_maintenance
from pillar.cli.operations import manager_operations
from pillar.cli.setup import manager_setup
from pillar.cli.elastic import manager_elastic
from . import badges

from pillar.cli import translations

@@ -24,3 +25,4 @@ manager.add_command("maintenance", manager_maintenance)
manager.add_command("setup", manager_setup)
manager.add_command("operations", manager_operations)
manager.add_command("elastic", manager_elastic)
manager.add_command("badges", badges.manager)
pillar/cli/badges.py | 39 lines (new file)
@@ -0,0 +1,39 @@
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
from flask_script import Manager
|
||||
from pillar import current_app, badge_sync
|
||||
from pillar.api.utils import utcnow
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
manager = Manager(current_app, usage="Badge operations")
|
||||
|
||||
|
||||
@manager.option('-u', '--user', dest='email', default='', help='Email address of the user to sync')
|
||||
@manager.option('-a', '--all', dest='sync_all', action='store_true', default=False,
|
||||
help='Sync all users')
|
||||
@manager.option('--go', action='store_true', default=False,
|
||||
help='Actually perform the sync; otherwise it is a dry-run.')
|
||||
def sync(email: str = '', sync_all: bool=False, go: bool=False):
|
||||
if bool(email) == bool(sync_all):
|
||||
raise ValueError('Use either --user or --all.')
|
||||
|
||||
if email:
|
||||
users_coll = current_app.db('users')
|
||||
db_user = users_coll.find_one({'email': email}, projection={'_id': True})
|
||||
if not db_user:
|
||||
raise ValueError(f'No user with email {email!r} found')
|
||||
specific_user = db_user['_id']
|
||||
else:
|
||||
specific_user = None
|
||||
|
||||
if not go:
|
||||
log.info('Performing dry-run, not going to change the user database.')
|
||||
start_time = utcnow()
|
||||
badge_sync.refresh_all_badges(specific_user, dry_run=not go,
|
||||
timelimit=datetime.timedelta(hours=1))
|
||||
end_time = utcnow()
|
||||
log.info('%s took %s (H:MM:SS)',
|
||||
'Updating user badges' if go else 'Dry-run',
|
||||
end_time - start_time)
|
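# Hypothetical invocations of the new CLI command (the manage.py entry-point
# name is an assumption):
#
#   python manage.py badges sync --user someone@example.com   # dry-run, one user
#   python manage.py badges sync --all --go                   # really sync everyone
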
@@ -559,50 +559,6 @@ def replace_pillar_node_type_schemas(project_url=None, all_projects=False, missi
             projects_changed, projects_seen)


@manager_maintenance.command
def remarkdown_comments():
    """Retranslates all Markdown to HTML for all comment nodes."""

    from pillar.api.nodes import convert_markdown

    nodes_collection = current_app.db()['nodes']
    comments = nodes_collection.find({'node_type': 'comment'},
                                     projection={'properties.content': 1,
                                                 'node_type': 1})

    updated = identical = skipped = errors = 0
    for node in comments:
        convert_markdown(node)
        node_id = node['_id']

        try:
            content_html = node['properties']['content_html']
        except KeyError:
            log.warning('Node %s has no content_html', node_id)
            skipped += 1
            continue

        result = nodes_collection.update_one(
            {'_id': node_id},
            {'$set': {'properties.content_html': content_html}}
        )
        if result.matched_count != 1:
            log.error('Unable to update node %s', node_id)
            errors += 1
            continue

        if result.modified_count:
            updated += 1
        else:
            identical += 1

    log.info('updated  : %i', updated)
    log.info('identical: %i', identical)
    log.info('skipped  : %i', skipped)
    log.info('errors   : %i', errors)


@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
                            help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
@@ -684,8 +640,8 @@ def upgrade_attachment_schema(proj_url=None, all_projects=False, go=False):
            log_proj()
            log.info('Removed %d empty attachment dicts', res.modified_count)
        else:
            to_remove = nodes_coll.count({'properties.attachments': {},
                                          'project': project['_id']})
            to_remove = nodes_coll.count_documents({'properties.attachments': {},
                                                    'project': project['_id']})
            if to_remove:
                log_proj()
                log.info('Would remove %d empty attachment dicts', to_remove)
@@ -767,7 +723,9 @@ def iter_markdown(proj_node_types: dict, some_node: dict, callback: typing.Calla
                continue
            to_visit.append((subdoc, definition['schema']))
            continue
        if definition.get('coerce') != 'markdown':
        coerce = definition.get('coerce')  # Eve < 0.8
        validator = definition.get('validator')  # Eve >= 0.8
        if coerce != 'markdown' and validator != 'markdown':
            continue

        my_log.debug('I have to change %r of %s', key, doc)
@@ -1064,3 +1022,156 @@ def delete_orphan_files():
        log.warning('Soft-deletion modified %d of %d files', res.modified_count, file_count)

    log.info('%d files have been soft-deleted', res.modified_count)


@manager_maintenance.command
def find_video_files_without_duration():
    """Find video files without any duration.

    This is a heavy operation. Use with care.
    """
    from pathlib import Path

    output_fpath = Path(current_app.config['STORAGE_DIR']) / 'video_files_without_duration.txt'
    if output_fpath.exists():
        log.error('Output filename %s already exists, remove it first.', output_fpath)
        return 1

    start_timestamp = datetime.datetime.now()
    files_coll = current_app.db('files')
    starts_with_video = re.compile("^video", re.IGNORECASE)
    aggr = files_coll.aggregate([
        {'$match': {'content_type': starts_with_video,
                    '_deleted': {'$ne': True}}},
        {'$unwind': '$variations'},
        {'$match': {
            'variations.duration': {'$not': {'$gt': 0}}
        }},
        {'$project': {'_id': 1}}
    ])

    file_ids = [str(f['_id']) for f in aggr]
    nbr_files = len(file_ids)
    log.info('Total number of video files without duration: %d', nbr_files)

    end_timestamp = datetime.datetime.now()
    duration = end_timestamp - start_timestamp
    log.info('Finding files took %s', duration)

    log.info('Writing Object IDs to %s', output_fpath)
    with output_fpath.open('w', encoding='ascii') as outfile:
        outfile.write('\n'.join(sorted(file_ids)))


@manager_maintenance.command
def find_video_nodes_without_duration():
    """Find video nodes without any duration.

    This is a heavy operation. Use with care.
    """
    from pathlib import Path

    output_fpath = Path(current_app.config['STORAGE_DIR']) / 'video_nodes_without_duration.txt'
    if output_fpath.exists():
        log.error('Output filename %s already exists, remove it first.', output_fpath)
        return 1

    start_timestamp = datetime.datetime.now()
    nodes_coll = current_app.db('nodes')

    aggr = nodes_coll.aggregate([
        {'$match': {'node_type': 'asset',
                    'properties.content_type': 'video',
                    '_deleted': {'$ne': True},
                    'properties.duration_seconds': {'$not': {'$gt': 0}}}},
        {'$project': {'_id': 1}}
    ])

    file_ids = [str(f['_id']) for f in aggr]
    nbr_files = len(file_ids)
    log.info('Total number of video nodes without duration: %d', nbr_files)

    end_timestamp = datetime.datetime.now()
    duration = end_timestamp - start_timestamp
    log.info('Finding nodes took %s', duration)

    log.info('Writing Object IDs to %s', output_fpath)
    with output_fpath.open('w', encoding='ascii') as outfile:
        outfile.write('\n'.join(sorted(file_ids)))


@manager_maintenance.option('-n', '--nodes', dest='nodes_to_update', nargs='*',
                            help='List of nodes to update')
@manager_maintenance.option('-a', '--all', dest='all_nodes', action='store_true', default=False,
                            help='Update all video nodes.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
                            help='Actually perform the changes (otherwise just show as dry-run).')
def reconcile_node_video_duration(nodes_to_update=None, all_nodes=False, go=False):
    """Copy video duration from file.variations.duration to node.properties.duration_seconds.

    This is a heavy operation. Use with care.
    """
    from pillar.api.utils import random_etag, utcnow

    if bool(nodes_to_update) == all_nodes:
        log.error('Use either --nodes or --all.')
        return 1

    start_timestamp = datetime.datetime.now()

    nodes_coll = current_app.db('nodes')
    node_subset = []
    if nodes_to_update:
        node_subset = [{'$match': {'_id': {'$in': [ObjectId(nid) for nid in nodes_to_update]}}}]
    files = nodes_coll.aggregate(
        [
            *node_subset,
            {'$match': {
                'node_type': 'asset',
                'properties.content_type': 'video',
                '_deleted': {'$ne': True}}
             },
            {'$lookup': {
                'from': 'files',
                'localField': 'properties.file',
                'foreignField': '_id',
                'as': '_files',
            }},
            {'$unwind': '$_files'},
            {'$unwind': '$_files.variations'},
            {'$match': {'_files.variations.duration': {'$gt': 0}}},
            {'$addFields': {
                'need_update': {'$ne': ['$_files.variations.duration', '$properties.duration_seconds']}
            }},
            {'$match': {'need_update': True}},
            {'$project': {
                '_id': 1,
                'duration': '$_files.variations.duration',
            }}]
    )

    if not go:
        log.info('Would try to update %d nodes', len(list(files)))
        return 0

    modified_count = 0
    for f in files:
        log.debug('Updating node %s with duration %d', f['_id'], f['duration'])
        new_etag = random_etag()
        now = utcnow()
        resp = nodes_coll.update_one(
            {'_id': f['_id']},
            {'$set': {
                'properties.duration_seconds': f['duration'],
                '_etag': new_etag,
                '_updated': now,
            }}
        )
        if resp.modified_count == 0:
            log.debug('Node %s was already up to date', f['_id'])
        modified_count += resp.modified_count

    log.info('Updated %d nodes', modified_count)
    end_timestamp = datetime.datetime.now()
    duration = end_timestamp - start_timestamp
    log.info('Operation took %s', duration)
    return 0

@@ -1,6 +1,8 @@
from collections import defaultdict
import datetime
import os.path
from os import getenv
from collections import defaultdict

import requests.certs

# Certificate file for communication with other systems.
@@ -29,10 +31,11 @@ DEBUG = False
SECRET_KEY = ''

# Authentication token hashing key. If empty, falls back to the UTF8-encoded SECRET_KEY with a warning.
# Not used to hash new tokens, but it is used to check pre-existing hashed tokens.
AUTH_TOKEN_HMAC_KEY = b''

# Authentication settings
BLENDER_ID_ENDPOINT = 'http://blender-id:8000/'
BLENDER_ID_ENDPOINT = 'http://id.local:8000/'

CDN_USE_URL_SIGNING = True
CDN_SERVICE_DOMAIN_PROTOCOL = 'https'
@@ -124,9 +127,8 @@ BLENDER_ID_USER_INFO_TOKEN = '-set-in-config-local-'
# Example entry:
# OAUTH_CREDENTIALS = {
#     'blender-id': {
#         'id': 'CLOUD-OF-SNOWFLAKES-43',
#         'id': 'CLOUD-OF-SNOWFLAKES-42',
#         'secret': 'thesecret',
#         'base_url': 'http://blender-id:8000/'
#     }
# }
# OAuth providers are defined in pillar.auth.oauth
@@ -204,8 +206,18 @@ CELERY_BEAT_SCHEDULE = {
        'schedule': 600,  # every N seconds
        'args': ('gcs', 100)
    },
    'refresh-blenderid-badges': {
        'task': 'pillar.celery.badges.sync_badges_for_users',
        'schedule': 10 * 60,  # every N seconds
        'args': (9 * 60,),  # time limit in seconds, keep shorter than 'schedule'
    }
}

# Badges are re-fetched once they are older than this timedelta.
# TODO(Sybren): A proper value should be determined after we actually have users with badges.
BLENDER_ID_BADGE_EXPIRY = datetime.timedelta(hours=4)


# Mapping from user role to capabilities obtained by users with that role.
USER_CAPABILITIES = defaultdict(**{
    'subscriber': {'subscriber', 'home-project'},
@@ -258,3 +270,14 @@ STATIC_FILE_HASH = ''
# all API endpoints do not need it. On the views that require it, we use the
# current_app.csrf.protect() method.
WTF_CSRF_CHECK_DEFAULT = False

# Flask Debug Toolbar. Enable it by overriding DEBUG_TB_ENABLED in config_local.py.
DEBUG_TB_ENABLED = False
DEBUG_TB_PANELS = [
    'flask_debugtoolbar.panels.versions.VersionDebugPanel',
    'flask_debugtoolbar.panels.headers.HeaderDebugPanel',
    'flask_debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
    'flask_debugtoolbar.panels.config_vars.ConfigVarsDebugPanel',
    'flask_debugtoolbar.panels.template.TemplateDebugPanel',
    'flask_debugtoolbar.panels.logger.LoggingPanel',
    'flask_debugtoolbar.panels.route_list.RouteListDebugPanel']
@@ -45,11 +45,15 @@ ALLOWED_STYLES = [
def markdown(s: str) -> str:
    commented_shortcodes = shortcodes.comment_shortcodes(s)
    tainted_html = CommonMark.commonmark(commented_shortcodes)
    safe_html = bleach.clean(tainted_html,
                             tags=ALLOWED_TAGS,

    # Create a Cleaner that supports parsing of bare links (see filters).
    cleaner = bleach.Cleaner(tags=ALLOWED_TAGS,
                             attributes=ALLOWED_ATTRIBUTES,
                             styles=ALLOWED_STYLES,
                             strip_comments=False)
                             strip_comments=False,
                             filters=[bleach.linkifier.LinkifyFilter])

    safe_html = cleaner.clean(tainted_html)
    return safe_html

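# Effect of the LinkifyFilter (illustrative): bare URLs in the Markdown source
# now come out as anchor tags, roughly:
markdown('See https://example.com for more.')
# -> '<p>See <a href="https://example.com" rel="nofollow">https://example.com</a> for more.</p>'
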
@@ -1,3 +1,5 @@
import flask
import raven.breadcrumbs
from raven.contrib.flask import Sentry

from .auth import current_user
@@ -14,16 +16,14 @@ class PillarSentry(Sentry):
    def init_app(self, app, *args, **kwargs):
        super().init_app(app, *args, **kwargs)

        # We perform authentication of the user while handling the request,
        # so Sentry calls get_user_info() too early.
        flask.request_started.connect(self.__add_sentry_breadcrumbs, self)

    def get_user_context_again(self, ):
        from flask import request

        try:
            self.client.user_context(self.get_user_info(request))
        except Exception as e:
            self.client.logger.exception(str(e))
    def __add_sentry_breadcrumbs(self, sender, **extra):
        raven.breadcrumbs.record(
            message='Request started',
            category='http',
            data={'url': flask.request.url}
        )

    def get_user_info(self, request):
        user_info = super().get_user_info(request)

@@ -33,18 +33,57 @@ log = logging.getLogger(__name__)
def shortcode(name: str):
    """Class decorator for shortcodes."""

    def decorator(cls):
        assert hasattr(cls, '__call__'), '@shortcode should be used on callables.'
        if isinstance(cls, type):
            instance = cls()
    def decorator(decorated):
        assert hasattr(decorated, '__call__'), '@shortcode should be used on callables.'
        if isinstance(decorated, type):
            as_callable = decorated()
        else:
            instance = cls
        shortcodes.register(name)(instance)
        return cls
            as_callable = decorated
        shortcodes.register(name)(as_callable)
        return decorated

    return decorator


class capcheck:
    """Decorator for shortcodes.

    On call, check for capabilities before calling the function. If the user
    does not have a capability, display a message instead of the content.

    kwargs:
        - 'cap': Capability required for viewing.
        - 'nocap': Optional, text shown when the user does not have this capability.
        - others: Passed to the decorated shortcode.
    """

    def __init__(self, decorated):
        assert hasattr(decorated, '__call__'), '@capcheck should be used on callables.'
        if isinstance(decorated, type):
            as_callable = decorated()
        else:
            as_callable = decorated
        self.decorated = as_callable

    def __call__(self,
                 context: typing.Any,
                 content: str,
                 pargs: typing.List[str],
                 kwargs: typing.Dict[str, str]) -> str:
        from pillar.auth import current_user

        cap = kwargs.pop('cap', '')
        if cap:
            nocap = kwargs.pop('nocap', '')
            if not current_user.has_cap(cap):
                if not nocap:
                    return ''
                html = html_module.escape(nocap)
                return f'<p class="shortcode nocap">{html}</p>'

        return self.decorated(context, content, pargs, kwargs)

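# Illustrative shortcode markup handled by @capcheck; the attribute names come
# from the docstring above, and the video ID is made up:
#
#   {youtube nR4223bzaw8 cap='subscriber' nocap='Subscribe to watch this video'}
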
@shortcode('test')
class Test:
    def __call__(self,
@@ -68,6 +107,7 @@ class Test:


@shortcode('youtube')
@capcheck
class YouTube:
    log = log.getChild('YouTube')

@@ -122,13 +162,17 @@ class YouTube:
        if not youtube_id:
            return html_module.escape('{youtube invalid YouTube ID/URL}')

        src = f'https://www.youtube.com/embed/{youtube_id}?rel=0'
        html = f'<iframe class="shortcode youtube" width="{width}" height="{height}" src="{src}"' \
               f' frameborder="0" allow="autoplay; encrypted-media" allowfullscreen></iframe>'
        src = f'https://www.youtube.com/embed/{youtube_id}?rel=0'
        html = f'<div class="embed-responsive embed-responsive-16by9">' \
               f'<iframe class="shortcode youtube embed-responsive-item"' \
               f' width="{width}" height="{height}" src="{src}"' \
               f' frameborder="0" allow="autoplay; encrypted-media" allowfullscreen></iframe>' \
               f'</div>'
        return html


@shortcode('iframe')
@capcheck
def iframe(context: typing.Any,
           content: str,
           pargs: typing.List[str],
@@ -140,16 +184,6 @@ def iframe(context: typing.Any,
    - others: Turned into attributes for the iframe element.
    """
    import xml.etree.ElementTree as ET
    from pillar.auth import current_user

    cap = kwargs.pop('cap', '')
    if cap:
        nocap = kwargs.pop('nocap', '')
        if not current_user.has_cap(cap):
            if not nocap:
                return ''
            html = html_module.escape(nocap)
            return f'<p class="shortcode nocap">{html}</p>'

    kwargs['class'] = f'shortcode {kwargs.get("class", "")}'.strip()
    element = ET.Element('iframe', kwargs)
@@ -194,12 +228,25 @@ class Attachment:

        return self.render(file_doc, pargs, kwargs)

    def sdk_file(self, slug: str, node_properties: dict) -> pillarsdk.File:
    def sdk_file(self, slug: str, document: dict) -> pillarsdk.File:
        """Return the file document for the attachment with this slug."""

        from pillar.web import system_util

        attachments = node_properties.get('attachments', {})
        # TODO (fsiddi): make explicit what 'document' is. In some cases we
        # pass the entire node or project document, in other cases we pass
        # node.properties. This should be unified at the level of do_markdown.
        # For now we do a quick hack: first look for 'properties' in the doc,
        # then fall back to 'attachments'.

        doc_properties = document.get('properties')
        if doc_properties:
            # We were passed an entire document (all nodes must have 'properties').
            attachments = doc_properties.get('attachments', {})
        else:
            # The value of 'document' could have been defined as 'node.properties'.
            attachments = document.get('attachments', {})

        attachment = attachments.get(slug)
        if not attachment:
            raise self.NoSuchSlug(slug)
@@ -1,6 +1,7 @@
# -*- encoding: utf-8 -*-

import base64
import contextlib
import copy
import datetime
import json
@@ -10,11 +11,7 @@ import pathlib
import sys
import typing
import unittest.mock

try:
    from urllib.parse import urlencode
except ImportError:
    from urllib.parse import urlencode
from urllib.parse import urlencode, urljoin

from bson import ObjectId, tz_util

@@ -27,6 +24,7 @@ from eve.tests import TestMinimal
import pymongo.collection
from flask.testing import FlaskClient
import flask.ctx
import flask.wrappers
import responses

import pillar
@@ -185,7 +183,7 @@ class AbstractPillarTest(TestMinimal):
        else:
            self.ensure_project_exists()

        with self.app.test_request_context():
        with self.app.app_context():
            files_collection = self.app.data.driver.db['files']
            assert isinstance(files_collection, pymongo.collection.Collection)

@@ -326,15 +324,46 @@ class AbstractPillarTest(TestMinimal):

        return user

    def create_valid_auth_token(self, user_id, token='token'):
    @contextlib.contextmanager
    def login_as(self, user_id: typing.Union[str, ObjectId]):
        """Context manager; within the context the app context is active and the user logged in.

        The logging-in happens when a request starts, so it's only active when
        e.g. self.get() or self.post() or some such request is used.
        """
        from pillar.auth import UserClass, login_user_object

        if isinstance(user_id, str):
            user_oid = ObjectId(user_id)
        elif isinstance(user_id, ObjectId):
            user_oid = user_id
        else:
            raise TypeError(f'invalid type {type(user_id)} for parameter user_id')
        user_doc = self.fetch_user_from_db(user_oid)

        def signal_handler(sender, **kwargs):
            login_user_object(user)

        with self.app.app_context():
            user = UserClass.construct('', user_doc)
            with flask.request_started.connected_to(signal_handler, self.app):
                yield

    # TODO: rename to 'create_auth_token' now that 'expire_in_days' can be negative.
    def create_valid_auth_token(self,
                                user_id: ObjectId,
                                token='token',
                                *,
                                oauth_scopes: typing.Optional[typing.List[str]] = None,
                                expire_in_days=1) -> dict:
        from pillar.api.utils import utcnow

        future = utcnow() + datetime.timedelta(days=1)
        future = utcnow() + datetime.timedelta(days=expire_in_days)

        with self.app.test_request_context():
            from pillar.api.utils import authentication as auth

            token_data = auth.store_token(user_id, token, future, None)
            token_data = auth.store_token(user_id, token, future, oauth_scopes=oauth_scopes)

        return token_data

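# Hypothetical use of the new login_as() helper in a test case:
class VideoProgressTest(AbstractPillarTest):
    def test_requires_login(self):
        user_id = self.create_user()
        with self.login_as(user_id):
            resp = self.get('/api/users/me')
            self.assertEqual(200, resp.status_code)
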
@@ -364,7 +393,7 @@ class AbstractPillarTest(TestMinimal):

        return user_id

    def create_node(self, node_doc):
    def create_node(self, node_doc) -> ObjectId:
        """Creates a node, returning its ObjectId."""

        with self.app.test_request_context():
@@ -406,7 +435,7 @@ class AbstractPillarTest(TestMinimal):
        """Sets up Responses to mock the unhappy validation flow."""

        responses.add(responses.POST,
                      '%s/u/validate_token' % self.app.config['BLENDER_ID_ENDPOINT'],
                      urljoin(self.app.config['BLENDER_ID_ENDPOINT'], 'u/validate_token'),
                      json={'status': 'fail'},
                      status=403)

@@ -414,7 +443,7 @@ class AbstractPillarTest(TestMinimal):
        """Sets up Responses to mock the happy validation flow."""

        responses.add(responses.POST,
                      '%s/u/validate_token' % self.app.config['BLENDER_ID_ENDPOINT'],
                      urljoin(self.app.config['BLENDER_ID_ENDPOINT'], 'u/validate_token'),
                      json=BLENDER_ID_USER_RESPONSE,
                      status=200)

@@ -485,11 +514,10 @@ class AbstractPillarTest(TestMinimal):
|
||||
|
||||
def client_request(self, method, path, qs=None, expected_status=200, auth_token=None, json=None,
|
||||
data=None, headers=None, files=None, content_type=None, etag=None,
|
||||
environ_overrides=None):
|
||||
environ_overrides=None) -> flask.wrappers.Response:
|
||||
"""Performs a HTTP request to the server."""
|
||||
|
||||
from pillar.api.utils import dumps
|
||||
import json as mod_json
|
||||
|
||||
headers = headers or {}
|
||||
environ_overrides = environ_overrides or {}
|
||||
@@ -522,29 +550,21 @@ class AbstractPillarTest(TestMinimal):
|
||||
expected_status, resp.status_code, resp.data
|
||||
))
|
||||
|
||||
def get_json():
|
||||
if resp.mimetype != 'application/json':
|
||||
raise TypeError('Unable to load JSON from mimetype %r' % resp.mimetype)
|
||||
return mod_json.loads(resp.data)
|
||||
|
||||
resp.json = get_json
|
||||
resp.get_json = get_json
|
||||
|
||||
return resp
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
def get(self, *args, **kwargs) -> flask.wrappers.Response:
|
||||
return self.client_request('GET', *args, **kwargs)
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
def post(self, *args, **kwargs) -> flask.wrappers.Response:
|
||||
return self.client_request('POST', *args, **kwargs)
|
||||
|
||||
def put(self, *args, **kwargs):
|
||||
def put(self, *args, **kwargs) -> flask.wrappers.Response:
|
||||
return self.client_request('PUT', *args, **kwargs)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
def delete(self, *args, **kwargs) -> flask.wrappers.Response:
|
||||
return self.client_request('DELETE', *args, **kwargs)
|
||||
|
||||
def patch(self, *args, **kwargs):
|
||||
def patch(self, *args, **kwargs) -> flask.wrappers.Response:
|
||||
return self.client_request('PATCH', *args, **kwargs)
|
||||
|
||||
def assertAllowsAccess(self,
|
||||
@@ -561,7 +581,7 @@ class AbstractPillarTest(TestMinimal):
|
||||
raise TypeError('expected_user_id should be a string or ObjectId, '
|
||||
f'but is {expected_user_id!r}')
|
||||
|
||||
resp = self.get('/api/users/me', expected_status=200, auth_token=token).json()
|
||||
resp = self.get('/api/users/me', expected_status=200, auth_token=token).get_json()
|
||||
|
||||
if expected_user_id:
|
||||
self.assertEqual(resp['_id'], str(expected_user_id))
|
||||
|
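The hand-rolled get_json monkey-patch can go because the requirements bump further down moves to Flask 1.0, whose response objects (flask.wrappers.Response) ship a native get_json() — hence also the .json() → .get_json() change above. A standalone sketch of the upstream behaviour this now relies on:

    from flask import Flask, jsonify

    app = Flask(__name__)

    @app.route('/ping')
    def ping():
        return jsonify(status='ok')

    with app.test_client() as client:
        resp = client.get('/ping')  # a flask.wrappers.Response
        assert resp.get_json() == {'status': 'ok'}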
@@ -1,9 +1,9 @@
 """Flask configuration file for unit testing."""

-BLENDER_ID_ENDPOINT = 'http://127.0.0.1:8001'  # nonexistant server, no trailing slash!
+BLENDER_ID_ENDPOINT = 'http://id.local:8001/'  # Non existant server

-SERVER_NAME = 'localhost'
-PILLAR_SERVER_ENDPOINT = 'http://localhost/api/'
+SERVER_NAME = 'localhost.local'
+PILLAR_SERVER_ENDPOINT = 'http://localhost.local/api/'

 MAIN_PROJECT_ID = '5672beecc0261b2005ed1a33'

@@ -26,7 +26,6 @@ OAUTH_CREDENTIALS = {
     'blender-id': {
         'id': 'blender-id-app-id',
         'secret': 'blender-id-secret',
-        'base_url': 'http://blender-id:8000/'
     },
     'facebook': {
         'id': 'fb-app-id',
@@ -45,3 +44,5 @@ ELASTIC_INDICES = {

 # MUST be 8 characters long, see pillar.flask_extra.HashedPathConverter
 STATIC_FILE_HASH = 'abcd1234'
+
+CACHE_NO_NULL_WARNING = True
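The trailing slash on the new BLENDER_ID_ENDPOINT matters: the test helpers above now build URLs with urllib.parse.urljoin, which treats the last path segment of a slash-less base as a file name and drops it. A quick illustration of that standard-library behaviour:

    from urllib.parse import urljoin

    # With a trailing slash the relative path is appended, as intended:
    assert urljoin('http://id.local:8001/', 'u/validate_token') == \
        'http://id.local:8001/u/validate_token'

    # Without one, the last base segment would be replaced:
    assert urljoin('http://id.local:8001/api', 'u/validate_token') == \
        'http://id.local:8001/u/validate_token'  # the 'api' segment is gone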
@@ -1,6 +1,7 @@
 from pillar.api.eve_settings import *

 MONGO_DBNAME = 'pillar_test'
+MONGO_USERNAME = None


 def override_eve():
@@ -10,5 +11,7 @@ def override_eve():
     test_settings.MONGO_HOST = MONGO_HOST
     test_settings.MONGO_PORT = MONGO_PORT
     test_settings.MONGO_DBNAME = MONGO_DBNAME
+    test_settings.MONGO1_USERNAME = MONGO_USERNAME
     tests.MONGO_HOST = MONGO_HOST
     tests.MONGO_DBNAME = MONGO_DBNAME
+    tests.MONGO_USERNAME = MONGO_USERNAME
@@ -1,6 +1,7 @@
 """Our custom Jinja filters and other template stuff."""

 import functools
+import json
 import logging
 import typing
 import urllib.parse
@@ -13,6 +14,7 @@ import werkzeug.exceptions as wz_exceptions
 import pillarsdk

 import pillar.api.utils
+from pillar.api.utils import pretty_duration
 from pillar.web.utils import pretty_date
 from pillar.web.nodes.routes import url_for_node
 import pillar.markdown
@@ -28,6 +30,10 @@ def format_pretty_date_time(d):
     return pretty_date(d, detail=True)


+def format_pretty_duration(s):
+    return pretty_duration(s)
+
+
 def format_undertitle(s):
     """Underscore-replacing title filter.

@@ -200,9 +206,16 @@ def do_yesno(value, arg=None):
     return no


+def do_json(some_object) -> str:
+    if isinstance(some_object, pillarsdk.Resource):
+        some_object = some_object.to_dict()
+    return json.dumps(some_object)
+
+
 def setup_jinja_env(jinja_env, app_config: dict):
     jinja_env.filters['pretty_date'] = format_pretty_date
     jinja_env.filters['pretty_date_time'] = format_pretty_date_time
+    jinja_env.filters['pretty_duration'] = format_pretty_duration
     jinja_env.filters['undertitle'] = format_undertitle
     jinja_env.filters['hide_none'] = do_hide_none
     jinja_env.filters['pluralize'] = do_pluralize
@@ -212,6 +225,7 @@ def setup_jinja_env(jinja_env, app_config: dict):
     jinja_env.filters['yesno'] = do_yesno
     jinja_env.filters['repr'] = repr
     jinja_env.filters['urljoin'] = functools.partial(urllib.parse.urljoin, allow_fragments=True)
+    jinja_env.filters['json'] = do_json
     jinja_env.globals['url_for_node'] = do_url_for_node
     jinja_env.globals['abs_url'] = functools.partial(flask.url_for,
                                                      _external=True,
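For context: json.dumps() cannot serialise arbitrary objects, which is why do_json converts pillarsdk resources to plain dicts first. A self-contained sketch of the filter in use — plain Jinja2, no Pillar imports; the duck-typed to_dict check stands in for the pillarsdk.Resource isinstance test above:

    import json
    import jinja2

    def do_json(some_object) -> str:
        if hasattr(some_object, 'to_dict'):  # e.g. a pillarsdk.Resource
            some_object = some_object.to_dict()
        return json.dumps(some_object)

    env = jinja2.Environment()
    env.filters['json'] = do_json
    tmpl = env.from_string('<script>var node = {{ node | json | safe }};</script>')
    print(tmpl.render(node={'name': 'Suzanne', 'id': 42}))
    # <script>var node = {"name": "Suzanne", "id": 42};</script>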
@@ -21,7 +21,7 @@ def attachment_form_group_create(schema_prop):
 def _attachment_build_single_field(schema_prop):
     # Ugly hard-coded schema.
     fake_schema = {
-        'slug': schema_prop['propertyschema'],
+        'slug': schema_prop['keyschema'],
         'oid': schema_prop['valueschema']['schema']['oid'],
     }
     file_select_form_group = build_file_select_form(fake_schema)
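propertyschema → keyschema tracks the Cerberus upgrade in the requirements change further down (the patched armadillica fork is replaced by Cerberus 1.2, which renamed the rule). A minimal sketch of the two rules — this schema is illustrative only, not Pillar's real attachment schema:

    from cerberus import Validator  # Cerberus >= 1.0

    schema = {
        'attachments': {
            'type': 'dict',
            # keyschema validates every *key* of the dict (the slugs)...
            'keyschema': {'type': 'string', 'regex': '^[a-zA-Z0-9_-]+$'},
            # ...while valueschema validates every *value*.
            'valueschema': {
                'type': 'dict',
                'schema': {'oid': {'type': 'string', 'required': True}},
            },
        },
    }

    v = Validator(schema)
    assert v.validate({'attachments': {'header': {'oid': '5672beecc0261b2005ed1a33'}}})
    assert not v.validate({'attachments': {'bad slug!': {'oid': 'x'}}})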
@@ -145,12 +145,21 @@ def comments_for_node(node_id):


 def render_comments_for_node(node_id: str, *, can_post_comments: bool):
-    """Render the list of comments for a node."""
+    """Render the list of comments for a node.
+
+    Comments are first sorted by confidence, see:
+    https://redditblog.com/2009/10/15/reddits-new-comment-sorting-system/
+    and then by creation date.
+    """
+
+    # TODO(fsiddi) Implement confidence calculation on node rating in Pillar core.
+    # Currently this feature is being developed in the Dillo extension.
     api = system_util.pillar_api()

     # Query for all children, i.e. comments on the node.
     comments = Node.all({
         'where': {'node_type': 'comment', 'parent': node_id},
+        'sort': [('properties.confidence', -1), ('_created', -1)],
     }, api=api)

     def enrich(some_comment):
@@ -171,6 +180,7 @@ def render_comments_for_node(node_id: str, *, can_post_comments: bool):
         # Query for all grandchildren, i.e. replies to comments on the node.
         comment['_replies'] = Node.all({
             'where': {'node_type': 'comment', 'parent': comment['_id']},
+            'sort': [('properties.confidence', -1), ('_created', -1)],
         }, api=api)

         enrich(comment)
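The linked Reddit post sorts by the lower bound of the Wilson score confidence interval on up/down votes, which ranks "60 up / 40 down" above "6 up / 4 down" even though the ratio is identical. The calculation itself lives in the Dillo extension (see the TODO above), so this is only a sketch of the usual formula that presumably feeds properties.confidence:

    import math

    def confidence(ups: int, downs: int, z: float = 1.96) -> float:
        """Lower bound of the Wilson score interval; z=1.96 for 95% confidence."""
        n = ups + downs
        if n == 0:
            return 0.0
        phat = ups / n
        return ((phat + z * z / (2 * n)
                 - z * math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n))
                / (1 + z * z / n))

    assert confidence(60, 40) > confidence(6, 4)  # more votes, more certainty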
@@ -19,6 +19,7 @@ from pillar.web.nodes.routes import url_for_node
 from pillar.web.nodes.forms import get_node_form
 import pillar.web.nodes.attachments
 from pillar.web.projects.routes import project_update_nodes_list
+from pillar.web.projects.routes import project_navigation_links

 log = logging.getLogger(__name__)

@@ -61,16 +62,10 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
         post.picture = get_file(post.picture, api=api)
         post.url = url_for_node(node=post)

-    # Use the *_main_project.html template for the main blog
-    is_main_project = project_id == current_app.config['MAIN_PROJECT_ID']
-    main_project_template = '_main_project' if is_main_project else ''
-    main_project_template = '_main_project'
     index_arch = 'archive' if archive else 'index'
-    template_path = f'nodes/custom/blog/{index_arch}{main_project_template}.html',
+    template_path = f'nodes/custom/blog/{index_arch}.html',

     if url:
         template_path = f'nodes/custom/post/view{main_project_template}.html',

         post = Node.find_one({
             'where': {'parent': blog._id, 'properties.url': url},
             'embedded': {'node_type': 1, 'user': 1},
@@ -95,6 +90,7 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
     can_create_blog_posts = project.node_type_has_method('post', 'POST', api=api)

     # Use functools.partial so we can later pass page=X.
+    is_main_project = project_id == current_app.config['MAIN_PROJECT_ID']
     if is_main_project:
         url_func = functools.partial(url_for, 'main.main_blog_archive')
     else:
@@ -112,24 +108,19 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
     else:
         project.blog_archive_prev = None

     title = 'blog_main' if is_main_project else 'blog'

-    pages = Node.all({
-        'where': {'project': project._id, 'node_type': 'page'},
-        'projection': {'name': 1}}, api=api)
+    navigation_links = project_navigation_links(project, api)

     return render_template(
         template_path,
         blog=blog,
-        node=post,
+        node=post,  # node is used by the generic comments rendering (see custom/_scripts.pug)
         posts=posts._items,
         posts_meta=pmeta,
         more_posts_available=pmeta['total'] > pmeta['max_results'],
         project=project,
         title=title,
         node_type_post=project.get_node_type('post'),
         can_create_blog_posts=can_create_blog_posts,
-        pages=pages._items,
+        navigation_links=navigation_links,
         api=api)

@@ -94,6 +94,16 @@ def find_for_post(project, node):
                    url=node.properties.url)


+@register_node_finder('page')
+def find_for_page(project, node):
+    """Returns the URL for a page."""
+
+    project_id = project['_id']
+
+    the_project = project_url(project_id, project=project)
+    return url_for('projects.view_node', project_url=the_project.url, node_id=node.properties.url)
+
+
 def find_for_other(project, node):
     """Fallback: Assets, textures, and other node types.

@@ -1,10 +1,11 @@
+import functools
 import logging
+import typing

 from datetime import datetime
 from datetime import date
 import pillarsdk
-from flask import current_app
-from flask_wtf import Form
+from flask_wtf import FlaskForm
 from wtforms import StringField
 from wtforms import DateField
 from wtforms import SelectField
@@ -17,6 +18,8 @@ from wtforms import DateTimeField
 from wtforms import SelectMultipleField
 from wtforms import FieldList
 from wtforms.validators import DataRequired
+
+from pillar import current_app
 from pillar.web.utils import system_util
 from pillar.web.utils.forms import FileSelectField
 from pillar.web.utils.forms import CustomFormField
@@ -44,6 +47,13 @@ def iter_node_properties(node_type):
         yield prop_name, prop_schema, prop_fschema


+@functools.lru_cache(maxsize=1)
+def tag_choices() -> typing.List[typing.Tuple[str, str]]:
+    """Return (value, label) tuples for the NODE_TAGS config setting."""
+    tags = current_app.config.get('NODE_TAGS') or []
+    return [(tag, tag.title()) for tag in tags]  # (value, label) tuples
+
+
 def add_form_properties(form_class, node_type):
     """Add fields to a form based on the node and form schema provided.
     :type node_schema: dict
@@ -60,7 +70,9 @@ def add_form_properties(form_class, node_type):
         # Recursive call if detects a dict
         field_type = schema_prop['type']

-        if field_type == 'dict':
+        if prop_name == 'tags' and field_type == 'list':
+            field = SelectMultipleField(choices=tag_choices())
+        elif field_type == 'dict':
             assert prop_name == 'attachments'
             field = attachments.attachment_form_group_create(schema_prop)
         elif field_type == 'list':
@@ -110,7 +122,7 @@ def get_node_form(node_type):
     :param node_type: Describes the node type via dyn_schema, form_schema and
         parent
     """
-    class ProceduralForm(Form):
+    class ProceduralForm(FlaskForm):
        pass

     parent_prop = node_type['parent']
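A note on the cached tag_choices(): functools.lru_cache(maxsize=1) on a no-argument function memoises the single call, so NODE_TAGS is read from the config once per process. A standalone sketch (the NODE_TAGS value here is made up):

    import functools

    NODE_TAGS = ['animation', 'modeling', 'rigging']  # stand-in for app.config['NODE_TAGS']

    @functools.lru_cache(maxsize=1)
    def tag_choices():
        # Cached after the first call; a config change at runtime would
        # need tag_choices.cache_clear() to become visible.
        return [(tag, tag.title()) for tag in NODE_TAGS]

    assert tag_choices() == [('animation', 'Animation'),
                             ('modeling', 'Modeling'),
                             ('rigging', 'Rigging')]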
@@ -1,4 +1,4 @@
-from flask_wtf import Form
+from flask_wtf import FlaskForm
 from wtforms import StringField
 from wtforms import BooleanField
 from wtforms import HiddenField
@@ -12,7 +12,7 @@ from pillar.web import system_util
 from pillar.web.utils.forms import FileSelectField, JSONRequired


-class ProjectForm(Form):
+class ProjectForm(FlaskForm):
     project_id = HiddenField('project_id', validators=[DataRequired()])
     name = StringField('Name', validators=[DataRequired()])
     url = StringField('Url', validators=[DataRequired()])
@@ -32,7 +32,7 @@ class ProjectForm(Form):
     picture_square = FileSelectField('Picture square', file_format='image')

     def validate(self):
-        rv = Form.validate(self)
+        rv = FlaskForm.validate(self)
         if not rv:
             return False

@@ -54,7 +54,7 @@ class ProjectForm(Form):
         return True


-class NodeTypeForm(Form):
+class NodeTypeForm(FlaskForm):
     project_id = HiddenField('project_id', validators=[DataRequired()])
     name = StringField('Name', validators=[DataRequired()])
     parent = StringField('Parent')
@@ -24,6 +24,7 @@ from pillar import current_app
 from pillar.api.utils import utcnow
 from pillar.web import system_util
 from pillar.web import utils
+from pillar.web.nodes import finders
 from pillar.web.utils.jstree import jstree_get_children
 import pillar.extension

@@ -302,6 +303,51 @@ def view(project_url):
                            'header_video_node': header_video_node})


+def project_navigation_links(project: typing.Type[Project], api) -> list:
+    """Returns a list of nodes for the project, for top navigation display.
+
+    Args:
+        project: A Project object.
+        api: the api client credential.
+
+    Returns:
+        A list of links for the Project.
+        For example we display a link to the project blog if present, as well
+        as pages. The list is structured as follows:
+
+        [{'url': '/p/spring/about', 'label': 'About'},
+        {'url': '/p/spring/blog', 'label': 'Blog'}]
+    """
+
+    links = []
+
+    # Fetch the blog
+    blog = Node.find_first({
+        'where': {'project': project._id, 'node_type': 'blog', '_deleted': {'$ne': True}},
+        'projection': {
+            'name': 1,
+        }
+    }, api=api)
+
+    if blog:
+        links.append({'url': finders.find_url_for_node(blog), 'label': blog.name, 'slug': 'blog'})
+
+    # Fetch pages
+    pages = Node.all({
+        'where': {'project': project._id, 'node_type': 'page', '_deleted': {'$ne': True}},
+        'projection': {
+            'name': 1,
+            'properties.url': 1
+        }
+    }, api=api)
+
+    # Process the results and append the links to the list
+    for p in pages._items:
+        links.append({'url': finders.find_url_for_node(p), 'label': p.name, 'slug': p.properties.url})
+
+    return links
+
+
 def render_project(project, api, extra_context=None, template_name=None):
     project.picture_square = utils.get_file(project.picture_square, api=api)
     project.picture_header = utils.get_file(project.picture_header, api=api)
@@ -315,6 +361,7 @@ def render_project(project, api, extra_context=None, template_name=None):
     # Construct query parameters outside the loop.
     projection = {'name': 1, 'user': 1, 'node_type': 1, 'project': 1,
                   'properties.url': 1, 'properties.content_type': 1,
+                  'properties.duration_seconds': 1,
                   'picture': 1}
     params = {'projection': projection, 'embedded': {'user': 1}}

@@ -370,6 +417,8 @@ def render_project(project, api, extra_context=None, template_name=None):

     extension_sidebar_links = current_app.extension_sidebar_links(project)

+    navigation_links = project_navigation_links(project, api)
+
     return render_template(template_name,
                            api=api,
                            project=project,
@@ -378,6 +427,7 @@ def render_project(project, api, extra_context=None, template_name=None):
                            show_project=True,
                            og_picture=project.picture_header,
                            activity_stream=activity_stream,
+                           navigation_links=navigation_links,
                            extension_sidebar_links=extension_sidebar_links,
                            **extra_context)

@@ -416,6 +466,7 @@ def view_node(project_url, node_id):
     api = system_util.pillar_api()
     # First we check if it's a simple string, in which case we are looking for
     # a static page. Maybe we could use bson.objectid.ObjectId.is_valid(node_id)
+    project: typing.Optional[Project] = None
     if not utils.is_valid_id(node_id):
         # raise wz_exceptions.NotFound('No such node')
         project, node = render_node_page(project_url, node_id, api)
@@ -433,30 +484,28 @@ def view_node(project_url, node_id):
         project = Project.find_one({'where': {"url": project_url, '_id': node.project}},
                                    api=api)
     except ResourceNotFound:
-        # In theatre mode, we don't need access to the project at all.
         if theatre_mode:
-            project = None
+            pass  # In theatre mode, we don't need access to the project at all.
         else:
             raise wz_exceptions.NotFound('No such project')

+    navigation_links = []
     og_picture = node.picture = utils.get_file(node.picture, api=api)
     if project:
         if not node.picture:
             og_picture = utils.get_file(project.picture_header, api=api)
-        project.picture_square = utils.get_file(project.picture_square, api=api)
+        navigation_links = project_navigation_links(project, api)

     # Append _theatre to load the proper template
     theatre = '_theatre' if theatre_mode else ''

     if node.node_type == 'page':
-        pages = Node.all({
-            'where': {'project': project._id, 'node_type': 'page'},
-            'projection': {'name': 1}}, api=api)
         return render_template('nodes/custom/page/view_embed.html',
                                api=api,
                                node=node,
                                project=project,
-                               pages=pages._items,
+                               navigation_links=navigation_links,
                                og_picture=og_picture,)

     extension_sidebar_links = current_app.extension_sidebar_links(project)
@@ -468,6 +517,7 @@ def view_node(project_url, node_id):
                            show_node=True,
                            show_project=False,
                            og_picture=og_picture,
+                           navigation_links=navigation_links,
                            extension_sidebar_links=extension_sidebar_links)

@@ -872,12 +872,6 @@
       "code": 61930,
       "src": "fontawesome"
     },
-    {
-      "uid": "31972e4e9d080eaa796290349ae6c1fd",
-      "css": "users",
-      "code": 59502,
-      "src": "fontawesome"
-    },
     {
       "uid": "c8585e1e5b0467f28b70bce765d5840c",
       "css": "clipboard-copy",
@@ -990,6 +984,30 @@
       "code": 59394,
       "src": "entypo"
     },
+    {
+      "uid": "347c38a8b96a509270fdcabc951e7571",
+      "css": "database",
+      "code": 61888,
+      "src": "fontawesome"
+    },
+    {
+      "uid": "3a6f0140c3a390bdb203f56d1bfdefcb",
+      "css": "speed",
+      "code": 59471,
+      "src": "entypo"
+    },
+    {
+      "uid": "4c1ef492f1d2c39a2250ae457cee2a6e",
+      "css": "social-instagram",
+      "code": 61805,
+      "src": "fontawesome"
+    },
+    {
+      "uid": "e36d581e4f2844db345bddc205d15dda",
+      "css": "users",
+      "code": 59507,
+      "src": "elusive"
+    },
     {
       "uid": "053a214a098a9453877363eeb45f004e",
       "css": "log-in",
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -33,7 +33,8 @@ def get_user_info(user_id):
     # TODO: put those fields into a config var or module-level global.
     return {'email': user.email,
             'full_name': user.full_name,
-            'username': user.username}
+            'username': user.username,
+            'badges_html': (user.badges and user.badges.html) or ''}


 def setup_app(app):
@@ -12,14 +12,6 @@ from pillar.sdk import FlaskInternalApi
 log = logging.getLogger(__name__)


-def blender_id_endpoint():
-    """Gets the endpoint for the authentication API. If the env variable
-    is defined, it's possible to override the (default) production address.
-    """
-    return os.environ.get('BLENDER_ID_ENDPOINT',
-                          "https://www.blender.org/id").rstrip('/')
-
-
 def pillar_server_endpoint():
     """Gets the endpoint for the authentication API. If the env variable
     is defined, we will use the one from the config object.
@@ -1,5 +1,5 @@
 from flask_login import current_user
-from flask_wtf import Form
+from flask_wtf import FlaskForm
 from pillar.web import system_util
 from pillarsdk.users import User

@@ -14,7 +14,7 @@ from wtforms.validators import Regexp
 import wtforms.validators as wtvalid


-class UserLoginForm(Form):
+class UserLoginForm(FlaskForm):
     username = StringField('Username', validators=[DataRequired()])
     password = PasswordField('Password', validators=[DataRequired()])
     remember_me = BooleanField('Remember Me')
@@ -23,7 +23,7 @@ class UserLoginForm(Form):
         super(UserLoginForm, self).__init__(csrf_enabled=False, *args, **kwargs)


-class UserProfileForm(Form):
+class UserProfileForm(FlaskForm):
     username = StringField('Username', validators=[DataRequired(), Length(
         min=3, max=128, message="Min. 3, max. 128 chars please"), Regexp(
         r'^[\w.@+-]+$', message="Please do not use spaces")])
@@ -52,7 +52,7 @@ class UserProfileForm(Form):
         return True


-class UserSettingsEmailsForm(Form):
+class UserSettingsEmailsForm(FlaskForm):
     choices = [
         (1, 'Keep me updated with Blender Cloud news.'),
         (0, 'Do not mail me news update.')]
@@ -74,7 +74,7 @@ class RolesField(SelectMultipleField):
         return current_app.user_roles


-class UserEditForm(Form):
+class UserEditForm(FlaskForm):
     roles = RolesField('Roles')
     email = StringField(
         validators=[wtvalid.DataRequired(), wtvalid.Email()],
@@ -48,6 +48,10 @@ def oauth_authorize(provider):

 @blueprint.route('/oauth/<provider>/authorized')
 def oauth_callback(provider):
+    import datetime
+    from pillar.api.utils.authentication import store_token
+    from pillar.api.utils import utcnow
+
     if current_user.is_authenticated:
         return redirect(url_for('main.homepage'))

@@ -65,7 +69,17 @@ def oauth_callback(provider):
     user_info = {'id': oauth_user.id, 'email': oauth_user.email, 'full_name': ''}
     db_user = find_user_in_db(user_info, provider=provider)
     db_id, status = upsert_user(db_user)
-    token = generate_and_store_token(db_id)
+
+    # TODO(Sybren): If the user doesn't have any badges, but the access token
+    # does have 'badge' scope, we should fetch the badges in the background.
+
+    if oauth_user.access_token:
+        # TODO(Sybren): make nr of days configurable, or get from OAuthSignIn subclass.
+        token_expiry = utcnow() + datetime.timedelta(days=15)
+        token = store_token(db_id, oauth_user.access_token, token_expiry,
+                            oauth_scopes=oauth_user.scopes)
+    else:
+        token = generate_and_store_token(db_id)

     # Login user
     pillar.auth.login_user(token['token'], load_from_db=True)
@@ -62,7 +62,7 @@ def jstree_get_children(node_id, project_id=None):
             'where': {
                 '$and': [
                     {'node_type': {'$regex': '^(?!attract_)'}},
-                    {'node_type': {'$not': {'$in': ['comment', 'post']}}},
+                    {'node_type': {'$not': {'$in': ['comment', 'post', 'blog', 'page']}}},
                 ],
             }
         }
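Blog and page nodes now stay out of the jstree sidebar because they moved into the top navigation (see project_navigation_links above). For clarity, a pure-Python stand-in for what this $and filter matches — no MongoDB needed to follow the logic:

    import re

    EXCLUDED = {'comment', 'post', 'blog', 'page'}

    def visible_in_jstree(node_type: str) -> bool:
        # Mirrors the Mongo query: no attract_* types, none of the excluded list.
        return (re.match('^(?!attract_)', node_type) is not None
                and node_type not in EXCLUDED)

    assert visible_in_jstree('asset')
    assert not visible_in_jstree('page')          # newly excluded
    assert not visible_in_jstree('attract_task')  # negative lookahead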
@@ -1,21 +1,22 @@
 # Primary requirements
 -r ../pillar-python-sdk/requirements.txt

-attrs==16.2.0
+attrs==18.2.0
 algoliasearch==1.12.0
 bcrypt==3.1.3
 blinker==1.4
-bleach==1.4.3
-celery[redis]==4.0.2
+bleach==2.1.3
+celery[redis]==4.2.1
 CommonMark==0.7.2
 elasticsearch==6.1.1
 elasticsearch-dsl==6.1.0
-Eve==0.7.3
-Flask==0.12
+Eve==0.8
+Flask==1.0.2
 Flask-Babel==0.11.2
-Flask-Cache==0.13.1
+Flask-Caching==1.4.0
 Flask-DebugToolbar==0.10.1
 Flask-Script==2.0.6
-Flask-Login==0.3.2
+Flask-Login==0.4.1
 Flask-WTF==0.14.2
 gcloud==0.12.0
 google-apitools==0.4.11
@@ -27,37 +28,49 @@ Pillow==4.1.1
 python-dateutil==2.5.3
 rauth==0.7.3
 raven[flask]==6.3.0
 requests==2.13.0
 redis==2.10.5
 shortcodes==2.5.0
 WebOb==1.5.0
 wheel==0.29.0
 zencoder==0.6.5


 # Secondary requirements
-amqp==2.1.4
-billiard==3.5.0.2
-Flask-PyMongo==0.4.1
--e git+https://github.com/armadillica/cerberus.git@sybren-0.9#egg=Cerberus
-Events==0.2.2
-future==0.15.2
-html5lib==0.9999999
-googleapis-common-protos==1.1.0
+amqp==2.3.2
+asn1crypto==0.24.0
+Babel==2.6.0
+billiard==3.5.0.4
+Cerberus==1.2
+cffi==1.10.0
+click==6.7
+cryptography==2.0.3
+Events==0.3
+future==0.16.0
+googleapis-common-protos==1.5.3
+html5lib==1.0.1
+idna==2.5
+ipaddress==1.0.22
+itsdangerous==0.24
-Jinja2==2.9.6
-kombu==4.0.2
-oauth2client==2.0.2
-oauthlib==2.0.1
-olefile==0.44
-protobuf==3.0.0b2.post2
-protorpc==0.11.1
-pyasn1-modules==0.0.8
-pymongo==3.4.0
-pytz==2017.2
-requests-oauthlib==0.7.0
+Jinja2==2.10
+kombu==4.2.1
+oauth2client==4.1.2
+oauthlib==2.1.0
+olefile==0.45.1
+protobuf==3.6.0
+protorpc==0.12.0
+pyasn1==0.4.4
+pyasn1-modules==0.2.2
+pycparser==2.17
+pymongo==3.7.0
+pyOpenSSL==16.2.0
+pytz==2018.5
+requests-oauthlib==1.0.0
 rsa==3.4.2
-simplejson==3.10.0
+simplejson==3.16.0
 six==1.10.0
 urllib3==1.22
-vine==1.1.3
-WTForms==2.1
-Werkzeug==0.11.15
+vine==1.1.4
+webencodings==0.5.1
+Werkzeug==0.14.1
+WTForms==2.2.1
setup.py (2 changed lines)
@@ -35,7 +35,7 @@ setuptools.setup(
     install_requires=[
         'Flask>=0.12',
         'Eve>=0.7.3',
-        'Flask-Cache>=0.13.1',
+        'Flask-Caching>=1.4.0',
         'Flask-Script>=2.0.5',
         'Flask-Login>=0.3.2',
         'Flask-OAuthlib>=0.9.3',
@@ -11,10 +11,8 @@ $(document).ready(function() {
     var what = '';

     // Templates binding
-    var hitTemplate = Hogan.compile($('#hit-template').text());
     var statsTemplate = Hogan.compile($('#stats-template').text());
     var facetTemplate = Hogan.compile($('#facet-template').text());
-    var sliderTemplate = Hogan.compile($('#slider-template').text());
     var paginationTemplate = Hogan.compile($('#pagination-template').text());

     // defined in tutti/4_search.js
@@ -47,6 +45,7 @@ $(document).ready(function() {
         renderFacets(content);
         renderPagination(content);
         renderFirstHit($(hits).children('.search-hit:first'));
+        updateUrlParams();
     });

     /***************
@@ -66,7 +65,7 @@ $(document).ready(function() {

         window.setTimeout(function() {
             // Ignore getting that first result when there is none.
-            var hit_id = firstHit.attr('data-hit-id');
+            var hit_id = firstHit.attr('data-node-id');
             if (hit_id === undefined) {
                 done();
                 return;
@@ -87,12 +86,6 @@ $(document).ready(function() {
     // Initial search
     initWithUrlParams();

-    function convertTimestamp(iso8601) {
-        var d = new Date(iso8601)
-        return d.toLocaleDateString();
-    }
-
     function renderStats(content) {
         var stats = {
             nbHits: numberWithDelimiter(content.count),
@@ -103,20 +96,17 @@ $(document).ready(function() {
     }

     function renderHits(content) {
-        var hitsHtml = '';
-        for (var i = 0; i < content.hits.length; ++i) {
-            var created = content.hits[i].created_at;
-            if (created) {
-                content.hits[i].created_at = convertTimestamp(created);
-            }
-            var updated = content.hits[i].updated_at;
-            if (updated) {
-                content.hits[i].updated_at = convertTimestamp(updated);
-            }
-            hitsHtml += hitTemplate.render(content.hits[i]);
+        $hits.empty();
+        if (content.hits.length === 0) {
+            $hits.html('<p id="no-hits">We didn\'t find any items. Try searching something else.</p>');
+        }
+        else {
+            listof$hits = content.hits.map(function(hit){
+                return pillar.templates.Component.create$listItem(hit)
+                    .addClass('js-search-hit cursor-pointer search-hit');
+            })
+            $hits.append(listof$hits);
         }
-        if (content.hits.length === 0) hitsHtml = '<p id="no-hits">We didn\'t find any items. Try searching something else.</p>';
-        $hits.html(hitsHtml);
     }

     function renderFacets(content) {
@@ -133,7 +123,7 @@ $(document).ready(function() {
             var refined = search.isRefined(label, item.key);
             values.push({
                 facet: label,
-                label: item.key,
+                label: item.key_as_string || item.key,
                 value: item.key,
                 count: item.doc_count,
                 refined: refined,
@@ -153,7 +143,7 @@ $(document).ready(function() {

         buckets.forEach(storeValue(values, label));
         facets.push({
-            title: label,
+            title: removeUnderscore(label),
             values: values.slice(0),
         });
     }
@@ -218,6 +208,9 @@ $(document).ready(function() {
         $pagination.html(paginationTemplate.render(pagination));
     }

+    function removeUnderscore(s) {
+        return s.replace(/_/g, ' ')
+    }

     // Event bindings
     // Click binding
@@ -300,37 +293,46 @@ $(document).ready(function() {
     };

     function initWithUrlParams() {
-        var sPageURL = location.hash;
-        if (!sPageURL || sPageURL.length === 0) {
-            return true;
-        }
-        var sURLVariables = sPageURL.split('&');
-        if (!sURLVariables || sURLVariables.length === 0) {
-            return true;
-        }
-        var query = decodeURIComponent(sURLVariables[0].split('=')[1]);
-        $inputField.val(query);
-        search.setQuery(query, what);
-
-        for (var i = 2; i < sURLVariables.length; i++) {
-            var sParameterName = sURLVariables[i].split('=');
-            var facet = decodeURIComponent(sParameterName[0]);
-            var value = decodeURIComponent(sParameterName[1]);
-        }
-        // Page has to be set in the end to avoid being overwritten
-        var page = decodeURIComponent(sURLVariables[1].split('=')[1]) - 1;
-        search.setCurrentPage(page);
+        var pageURL = decodeURIComponent(window.location.search.substring(1)),
+            urlVariables = pageURL.split('&'),
+            query,
+            i;
+        for (i = 0; i < urlVariables.length; i++) {
+            var parameterPair = urlVariables[i].split('='),
+                key = parameterPair[0],
+                sValue = parameterPair[1];
+            if (!key) continue;
+            if (key === 'q') {
+                query = sValue;
+                continue;
+            }
+            if (key === 'page') {
+                var page = Number.parseInt(sValue)
+                search.setCurrentPage(isNaN(page) ? 0 : page)
+                continue;
+            }
+            if (key === 'project') {
+                continue; // We take the project from the path
+            }
+            if (sValue !== undefined) {
+                var iValue = Number.parseInt(sValue),
+                    value = isNaN(iValue) ? sValue : iValue;
+                search.toggleTerm(key, value);
+                continue;
+            }
+            console.log('Unhandled url parameter pair:', parameterPair)
+        }
+        do_search(query || '');
     }

-    function setURLParams(state) {
-        var urlParams = '?';
-        var currentQuery = state.query;
-        urlParams += 'q=' + encodeURIComponent(currentQuery);
-        var currentPage = state.page + 1;
-        urlParams += '&page=' + currentPage;
-        location.replace(urlParams);
+    function updateUrlParams() {
+        var prevState = history.state,
+            prevTitle = document.title,
+            params = search.getParams(),
+            newUrl = window.location.pathname + '?';
+        delete params['project'] // We take the project from the path
+        newUrl += jQuery.param(params)
+        history.replaceState(prevState, prevTitle, newUrl);
     }

     // do empty search to fill aggregations
     do_search('');
 });
src/scripts/js/es6/common/quicksearch/MultiSearch.js (new file, 58 lines)
@@ -0,0 +1,58 @@
+import {SearchParams} from './SearchParams';
+
+export class MultiSearch {
+    constructor(kwargs) {
+        this.uiUrl = kwargs['uiUrl'];  // Url for advanced search
+        this.apiUrl = kwargs['apiUrl'];  // Url for api calls
+        this.searchParams = MultiSearch.createMultiSearchParams(kwargs['searchParams']);
+        this.q = '';
+    }
+
+    setSearchWord(q) {
+        this.q = q;
+        this.searchParams.forEach((qsParam) => {
+            qsParam.setSearchWord(q);
+        });
+    }
+
+    getSearchUrl() {
+        return this.uiUrl + '?q=' + this.q;
+    }
+
+    getAllParams() {
+        let retval = $.map(this.searchParams, (msParams) => {
+            return msParams.params;
+        });
+        return retval;
+    }
+
+    parseResult(rawResult) {
+        return $.map(rawResult, (subResult, index) => {
+            let name = this.searchParams[index].name;
+            let pStr = this.searchParams[index].getParamStr();
+            let result = $.map(subResult.hits.hits, (hit) => {
+                return hit._source;
+            });
+            return {
+                name: name,
+                url: this.uiUrl + '?' + pStr,
+                result: result,
+                hasResults: !!result.length
+            };
+        });
+    }
+
+    thenExecute() {
+        let data = JSON.stringify(this.getAllParams());
+        let rawAjax = $.getJSON(this.apiUrl, data);
+        let prettyPromise = rawAjax.then(this.parseResult.bind(this));
+        prettyPromise['abort'] = rawAjax.abort.bind(rawAjax);  // Hack to be able to abort the promise down the road
+        return prettyPromise;
+    }
+
+    static createMultiSearchParams(argsList) {
+        return $.map(argsList, (args) => {
+            return new SearchParams(args);
+        });
+    }
+}
src/scripts/js/es6/common/quicksearch/QuickSearch.js (new file, 204 lines)
@@ -0,0 +1,204 @@
+import { create$noHits, create$results, create$input } from './templates'
+import {SearchFacade} from './SearchFacade';
+/**
+ * QuickSearch : Interacts with the dom document
+ * 1-SearchFacade : Controls which multisearch is active
+ * *-MultiSearch : One multi search is typically Project or Cloud
+ * *-SearchParams : The search params for the individual searches
+ */
+
+export class QuickSearch {
+    /**
+     * Interacts with the dom document and deligates the input down to the SearchFacade
+     * @param {selector string} searchToggle The quick-search toggle
+     * @param {*} kwargs
+     */
+    constructor(searchToggle, kwargs) {
+        this.$body = $('body');
+        this.$quickSearch = $('.quick-search');
+        this.$inputComponent = $(kwargs['inputTarget']);
+        this.$inputComponent.empty();
+        this.$inputComponent.append(create$input(kwargs['searches']));
+        this.$searchInput = this.$inputComponent.find('input');
+        this.$searchSelect = this.$inputComponent.find('select');
+        this.$resultTarget = $(kwargs['resultTarget']);
+        this.$searchSymbol = this.$inputComponent.find('.qs-busy-symbol');
+        this.searchFacade = new SearchFacade(kwargs['searches'] || {});
+        this.$searchToggle = $(searchToggle);
+        this.isBusy = false;
+        this.attach();
+    }
+
+    attach() {
+        if (this.$searchSelect.length) {
+            this.$searchSelect
+                .change(this.execute.bind(this))
+                .change(() => this.$searchInput.focus());
+            this.$searchInput.addClass('multi-scope');
+        }
+
+        this.$searchInput
+            .keyup(this.onInputKeyUp.bind(this));
+
+        this.$inputComponent
+            .on('pillar:workStart', () => {
+                this.$searchSymbol.addClass('spinner')
+                this.$searchSymbol.toggleClass('pi-spin pi-cancel')
+            })
+            .on('pillar:workStop', () => {
+                this.$searchSymbol.removeClass('spinner')
+                this.$searchSymbol.toggleClass('pi-spin pi-cancel')
+            });
+
+        this.searchFacade.setOnResultCB(this.renderResult.bind(this));
+        this.searchFacade.setOnFailureCB(this.onSearchFailed.bind(this));
+        this.$searchToggle
+            .one('click', this.execute.bind(this));  // Initial search executed once
+
+        this.registerShowGui();
+        this.registerHideGui();
+    }
+
+    registerShowGui() {
+        this.$searchToggle
+            .click((e) => {
+                this.showGUI();
+                e.stopPropagation();
+            });
+    }
+
+    registerHideGui() {
+        this.$searchSymbol
+            .click(() => {
+                this.hideGUI();
+            });
+        this.$body.click((e) => {
+            let $target = $(e.target);
+            let isClickInResult = $target.hasClass('.qs-result') || !!$target.parents('.qs-result').length;
+            let isClickInInput = $target.hasClass('.qs-input') || !!$target.parents('.qs-input').length;
+            if (!isClickInResult && !isClickInInput) {
+                this.hideGUI();
+            }
+        });
+        $(document).keyup((e) => {
+            if (e.key === 'Escape') {
+                this.hideGUI();
+            }
+        });
+    }
+
+    showGUI() {
+        this.$body.addClass('has-overlay');
+        this.$quickSearch.trigger('pillar:searchShow');
+        this.$quickSearch.addClass('show');
+        if (!this.$searchInput.is(':focus')) {
+            this.$searchInput.focus();
+        }
+    }
+
+    hideGUI() {
+        this.$body.removeClass('has-overlay');
+        this.$searchToggle.addClass('pi-search');
+        this.$searchInput.blur();
+        this.$quickSearch.removeClass('show');
+        this.$quickSearch.trigger('pillar:searchHidden');
+    }
+
+    onInputKeyUp(e) {
+        let newQ = this.$searchInput.val();
+        let currQ = this.searchFacade.getSearchWord();
+        this.searchFacade.setSearchWord(newQ);
+        let searchUrl = this.searchFacade.getSearchUrl();
+        if (e.key === 'Enter') {
+            window.location.href = searchUrl;
+            return;
+        }
+        if (newQ !== currQ) {
+            this.execute();
+        }
+    }
+
+    execute() {
+        this.busy(true);
+        let scope = this.getScope();
+        this.searchFacade.setCurrentScope(scope);
+        let q = this.$searchInput.val();
+        this.searchFacade.setSearchWord(q);
+        this.searchFacade.execute();
+    }
+
+    renderResult(results) {
+        this.$resultTarget.empty();
+        this.$resultTarget.append(this.create$result(results));
+        this.busy(false);
+    }
+
+    create$result(results) {
+        let withHits = results.reduce((aggr, subResult) => {
+            if (subResult.hasResults) {
+                aggr.push(subResult);
+            }
+            return aggr;
+        }, []);
+
+        if (!withHits.length) {
+            return create$noHits(this.searchFacade.getSearchUrl());
+        }
+        return create$results(results, this.searchFacade.getSearchUrl());
+    }
+
+    onSearchFailed(err) {
+        toastr.error(xhrErrorResponseMessage(err), 'Unable to perform search:');
+        this.busy(false);
+        this.$inputComponent.trigger('pillar:failed', err);
+    }
+
+    getScope() {
+        return !!this.$searchSelect.length ? this.$searchSelect.val() : 'cloud';
+    }
+
+    busy(val) {
+        if (val !== this.isBusy) {
+            var eventType = val ? 'pillar:workStart' : 'pillar:workStop';
+            this.$inputComponent.trigger(eventType);
+        }
+        this.isBusy = val;
+    }
+}
+
+$.fn.extend({
+    /**
+     * $('#qs-toggle').quickSearch({
+     *     resultTarget: '#search-overlay',
+     *     inputTarget: '#qs-input',
+     *     searches: {
+     *         project: {
+     *             name: 'Project',
+     *             uiUrl: '{{ url_for("projects.search", project_url=project.url)}}',
+     *             apiUrl: '/api/newsearch/multisearch',
+     *             searchParams: [
+     *                 {name: 'Assets', params: {project: '{{ project._id }}', node_type: 'asset'}},
+     *                 {name: 'Blog', params: {project: '{{ project._id }}', node_type: 'post'}},
+     *                 {name: 'Groups', params: {project: '{{ project._id }}', node_type: 'group'}},
+     *             ]
+     *         },
+     *         cloud: {
+     *             name: 'Cloud',
+     *             uiUrl: '/search',
+     *             apiUrl: '/api/newsearch/multisearch',
+     *             searchParams: [
+     *                 {name: 'Assets', params: {node_type: 'asset'}},
+     *                 {name: 'Blog', params: {node_type: 'post'}},
+     *                 {name: 'Groups', params: {node_type: 'group'}},
+     *             ]
+     *         },
+     *     },
+     * });
+     * @param {*} kwargs
+     */
+    quickSearch: function (kwargs) {
+        $(this).each((i, qsElem) => {
+            new QuickSearch(qsElem, kwargs);
+        });
+    }
+})
src/scripts/js/es6/common/quicksearch/SearchFacade.js (new file, 68 lines)
@@ -0,0 +1,68 @@
+import {MultiSearch} from './MultiSearch';
+
+export class SearchFacade {
+    /**
+     * One SearchFacade holds n-number of MultiSearch objects, and delegates search requests to the active mutlisearch
+     * @param {*} kwargs
+     */
+    constructor(kwargs) {
+        this.searches = SearchFacade.createMultiSearches(kwargs);
+        this.currentScope = 'cloud';  // which multisearch to use
+        this.currRequest;
+        this.resultCB;
+        this.failureCB;
+        this.q = '';
+    }
+
+    setSearchWord(q) {
+        this.q = q;
+        $.each(this.searches, (k, mSearch) => {
+            mSearch.setSearchWord(q);
+        });
+    }
+
+    getSearchWord() {
+        return this.q;
+    }
+
+    getSearchUrl() {
+        return this.searches[this.currentScope].getSearchUrl();
+    }
+
+    setCurrentScope(scope) {
+        this.currentScope = scope;
+    }
+
+    execute() {
+        if (this.currRequest) {
+            this.currRequest.abort();
+        }
+        this.currRequest = this.searches[this.currentScope].thenExecute();
+        this.currRequest
+            .then((results) => {
+                this.resultCB(results);
+            })
+            .fail((err, reason) => {
+                if (reason == 'abort') {
+                    return;
+                }
+                this.failureCB(err);
+            });
+    }
+
+    setOnResultCB(cb) {
+        this.resultCB = cb;
+    }
+
+    setOnFailureCB(cb) {
+        this.failureCB = cb;
+    }
+
+    static createMultiSearches(kwargs) {
+        var searches = {};
+        $.each(kwargs, (key, value) => {
+            searches[key] = new MultiSearch(value);
+        });
+        return searches;
+    }
+}
src/scripts/js/es6/common/quicksearch/SearchParams.js (new file, 14 lines)
@@ -0,0 +1,14 @@
+export class SearchParams {
+    constructor(kwargs) {
+        this.name = kwargs['name'] || '';
+        this.params = kwargs['params'] || {};
+    }
+
+    setSearchWord(q) {
+        this.params['q'] = q || '';
+    }
+
+    getParamStr() {
+        return jQuery.param(this.params);
+    }
+}
src/scripts/js/es6/common/quicksearch/init.js (new file, 1 line)
@@ -0,0 +1 @@
+export { QuickSearch } from './QuickSearch';
src/scripts/js/es6/common/quicksearch/templates.js (new file, 93 lines)
@@ -0,0 +1,93 @@
+/**
+ * Creates the jQuery object that is rendered when nothing is found
+ * @param {String} advancedUrl Url to the advanced search with the current query
+ * @returns {$element} The jQuery element that is rendered wher there are no hits
+ */
+function create$noHits(advancedUrl) {
+    return $('<div>')
+        .addClass('qs-msg text-center p-3')
+        .append(
+            $('<div>')
+                .addClass('h1 pi-displeased'),
+            $('<div>')
+                .addClass('h2')
+                .append(
+                    $('<a>')
+                        .attr('href', advancedUrl)
+                        .text('Advanced search')
+                )
+        )
+}
+/**
+ * Creates the jQuery object that is rendered as the search input
+ * @param {Dict} searches The searches dict that is passed in on construction of the Quick-Search
+ * @returns {$element} The jQuery object that renders the search input components.
+ */
+function create$input(searches) {
+    let input = $('<input>')
+        .addClass('qs-input')
+        .attr('type', 'search')
+        .attr('autocomplete', 'off')
+        .attr('spellcheck', 'false')
+        .attr('autocorrect', 'false')
+        .attr('placeholder', 'Search...');
+    let workingSymbol = $('<i>')
+        .addClass('pi-cancel qs-busy-symbol');
+    let inputComponent = [input, workingSymbol];
+    if (Object.keys(searches).length > 1) {
+        let i = 0;
+        let select = $('<select>')
+            .append(
+                $.map(searches, (it, value) => {
+                    let option = $('<option>')
+                        .attr('value', value)
+                        .text(it['name']);
+                    if (i === 0) {
+                        option.attr('selected', 'selected');
+                    }
+                    i += 1;
+                    return option;
+                })
+            );
+        inputComponent.push(select);
+    }
+    return inputComponent;
+}
+
+/**
+ * Creates the search result
+ * @param {List} results
+ * @param {String} advancedUrl
+ * @returns {$element} The jQuery object that is rendered as the result
+ */
+function create$results(results, advancedUrl) {
+    let $results = results.reduce((agg, res)=> {
+        if(res['result'].length) {
+            agg.push(
+                $('<a>')
+                    .addClass('h4 mt-4 d-flex')
+                    .attr('href', res['url'])
+                    .text(res['name'])
+            )
+            agg.push(
+                $('<div>')
+                    .addClass('card-deck card-deck-responsive card-padless js-asset-list p-3')
+                    .append(
+                        ...pillar.templates.Nodes.createListOf$nodeItems(res['result'], 10, 0)
+                    )
+            )
+        }
+        return agg;
+    }, [])
+    $results.push(
+        $('<a>')
+            .attr('href', advancedUrl)
+            .text('Advanced search...')
+    )
+
+    return $('<div>')
+        .addClass('m-auto qs-result')
+        .append(...$results)
+}
+
+export { create$noHits, create$results, create$input }
src/scripts/js/es6/common/templates/__tests__/Assets.test.js (new file, 124 lines)
@@ -0,0 +1,124 @@
+import { Assets } from '../nodes/Assets'
+
+jest.useFakeTimers();
+
+describe('Assets', () => {
+    describe('create$listItem', () => {
+        let nodeDoc;
+        let spyGet;
+        beforeEach(()=>{
+            // mock now to get a stable pretty printed created
+            Date.now = jest.fn(() => new Date(Date.UTC(2018,
+                10, //November! zero based month!
+                28, 11, 46, 30)).valueOf()); // A Tuesday
+
+            nodeDoc = {
+                _id: 'my-asset-id',
+                name: 'My Asset',
+                node_type: 'asset',
+                _created: "Wed, 07 Nov 2018 16:35:09 GMT",
+                project: {
+                    name: 'My Project',
+                    url: 'url-to-project'
+                },
+                properties: {
+                    content_type: 'image'
+                }
+            };
+
+            spyGet = spyOn($, 'get').and.callFake(function(url) {
+                let ajaxMock = $.Deferred();
+                let response = {
+                    variations: [{
+                        size: 'l',
+                        link: 'wrong-img-link',
+                        width: 150,
+                        height: 170,
+                    },{
+                        size: 'm',
+                        link: 'img-link',
+                        width: 50,
+                        height: 70,
+                    },{
+                        size: 's',
+                        link: 'wrong-img-link',
+                        width: 5,
+                        height: 7,
+                    }]
+                }
+                ajaxMock.resolve(response);
+                return ajaxMock.promise();
+            });
+        });
+        describe('image content', () => {
+            test('node with picture', done => {
+                nodeDoc.picture = 'picture_id';
+                let $card = Assets.create$listItem(nodeDoc);
+                jest.runAllTimers();
+                expect($card.length).toEqual(1);
+                expect($card.prop('tagName')).toEqual('A'); // <a>
+                expect($card.hasClass('asset')).toBeTruthy();
+                expect($card.hasClass('card')).toBeTruthy();
+                expect($card.attr('href')).toEqual('/nodes/my-asset-id/redir');
+                expect($card.attr('title')).toEqual('My Asset');
+
+                let $body = $card.find('.card-body');
+                expect($body.length).toEqual(1);
+
+                let $title = $body.find('.card-title');
+                expect($title.length).toEqual(1);
+
+                expect(spyGet).toHaveBeenCalledTimes(1);
+                expect(spyGet).toHaveBeenLastCalledWith('/api/files/picture_id');
+
+                let $image = $card.find('img');
+                expect($image.length).toEqual(1);
+
+                let $imageSubsititure = $card.find('.pi-asset');
+                expect($imageSubsititure.length).toEqual(0);
+
+                let $progress = $card.find('.progress');
+                expect($progress.length).toEqual(0);
+
+                let $watched = $card.find('.card-label');
+                expect($watched.length).toEqual(0);
+
+                expect($card.find(':contains(3 weeks ago)').length).toBeTruthy();
+                done();
+            });
+
+            test('node without picture', done => {
+                let $card = Assets.create$listItem(nodeDoc);
+                expect($card.length).toEqual(1);
+                expect($card.prop('tagName')).toEqual('A'); // <a>
+                expect($card.hasClass('asset')).toBeTruthy();
+                expect($card.hasClass('card')).toBeTruthy();
+                expect($card.attr('href')).toEqual('/nodes/my-asset-id/redir');
+                expect($card.attr('title')).toEqual('My Asset');
+
+                let $body = $card.find('.card-body');
+                expect($body.length).toEqual(1);
+
+                let $title = $body.find('.card-title');
+                expect($title.length).toEqual(1);
+
+                expect(spyGet).toHaveBeenCalledTimes(0);
+
+                let $image = $card.find('img');
+                expect($image.length).toEqual(0);
+
+                let $imageSubsititure = $card.find('.pi-asset');
+                expect($imageSubsititure.length).toEqual(1);
+
+                let $progress = $card.find('.progress');
+                expect($progress.length).toEqual(0);
+
+                let $watched = $card.find('.card-label');
+                expect($watched.length).toEqual(0);
+
+                expect($card.find(':contains(3 weeks ago)').length).toBeTruthy();
+                done();
+            });
+        });
+    })
+});
@@ -0,0 +1,48 @@
+import { Assets } from '../nodes/Assets'
+import { Users } from '../users/Users'
+import { Component } from '../init' // Component is initialized in init
+
+describe('Component', () => {
+    test('can create Users listItem', () => {
+        let userDoc = {
+            _id: 'my-user-id',
+            username: 'My User Name',
+            full_name: 'My full name',
+            roles: ['admin', 'subscriber']
+        };
+
+        let $user_actual = Component.create$listItem(userDoc);
+        expect($user_actual.length).toBe(1);
+
+        let $user_reference = Users.create$listItem(userDoc);
+        expect($user_actual).toEqual($user_reference);
+    });
+
+    test('can create Asset listItem', () => {
+        let nodeDoc = {
+            _id: 'my-asset-id',
+            name: 'My Asset',
+            node_type: 'asset',
+            project: {
+                name: 'My Project',
+                url: 'url-to-project'
+            },
+            properties: {
+                content_type: 'image'
+            }
+        };
+
+        let $asset_actual = Component.create$listItem(nodeDoc);
+        expect($asset_actual.length).toBe(1);
+
+        let $asset_reference = Assets.create$listItem(nodeDoc);
+        expect($asset_actual).toEqual($asset_reference);
+    });
+
+    test('fail to create unknown', () => {
+        expect(()=>Component.create$listItem({})).toThrow('Can not create component using: {}')
+        expect(()=>Component.create$listItem()).toThrow('Can not create component using: undefined')
+        expect(()=>Component.create$listItem({strange: 'value'}))
+            .toThrow('Can not create component using: {"strange":"value"}')
+    });
+});
src/scripts/js/es6/common/templates/__tests__/utils.test.js (new file, 67 lines)
@@ -0,0 +1,67 @@
+import { prettyDate } from '../utils'
+
+describe('prettydate', () => {
+    beforeEach(() => {
+        Date.now = jest.fn(() => new Date(Date.UTC(2016,
+            10, //November! zero based month!
+            8, 11, 46, 30)).valueOf()); // A Tuesday
+    });
+
+    test('bad input', () => {
+        expect(prettyDate(undefined)).toBeUndefined();
+        expect(prettyDate(null)).toBeUndefined();
+        expect(prettyDate('my birthday')).toBeUndefined();
+    });
+
+    test('past dates',() => {
+        expect(pd({seconds: -5})).toBe('just now');
+        expect(pd({minutes: -5})).toBe('5m ago')
+        expect(pd({days: -7})).toBe('last Tuesday')
+        expect(pd({days: -8})).toBe('1 week ago')
+        expect(pd({days: -14})).toBe('2 weeks ago')
+        expect(pd({days: -31})).toBe('8 Oct')
+        expect(pd({days: -(31 + 366)})).toBe('8 Oct 2015')
+    });
+
+    test('past dates with time',() => {
+        expect(pd({seconds: -5, detailed: true})).toBe('just now');
+        expect(pd({minutes: -5, detailed: true})).toBe('5m ago')
+        expect(pd({days: -7, detailed: true})).toBe('last Tuesday at 11:46')
+        expect(pd({days: -8, detailed: true})).toBe('1 week ago at 11:46')
+        // summer time bellow
+        expect(pd({days: -14, detailed: true})).toBe('2 weeks ago at 10:46')
+        expect(pd({days: -31, detailed: true})).toBe('8 Oct at 10:46')
+        expect(pd({days: -(31 + 366), detailed: true})).toBe('8 Oct 2015 at 10:46')
+    });
+
+    test('future dates',() => {
+        expect(pd({seconds: 5})).toBe('just now')
+        expect(pd({minutes: 5})).toBe('in 5m')
+        expect(pd({days: 7})).toBe('next Tuesday')
+        expect(pd({days: 8})).toBe('in 1 week')
+        expect(pd({days: 14})).toBe('in 2 weeks')
+        expect(pd({days: 30})).toBe('8 Dec')
+        expect(pd({days: 30 + 365})).toBe('8 Dec 2017')
+    });
+
+    test('future dates',() => {
+        expect(pd({seconds: 5, detailed: true})).toBe('just now')
+        expect(pd({minutes: 5, detailed: true})).toBe('in 5m')
+        expect(pd({days: 7, detailed: true})).toBe('next Tuesday at 11:46')
+        expect(pd({days: 8, detailed: true})).toBe('in 1 week at 11:46')
+        expect(pd({days: 14, detailed: true})).toBe('in 2 weeks at 11:46')
+        expect(pd({days: 30, detailed: true})).toBe('8 Dec at 11:46')
+        expect(pd({days: 30 + 365, detailed: true})).toBe('8 Dec 2017 at 11:46')
+    });
+
+    function pd(params) {
+        let theDate = new Date(Date.now());
+        theDate.setFullYear(theDate.getFullYear() + (params['years'] || 0));
+        theDate.setMonth(theDate.getMonth() + (params['months'] || 0));
+        theDate.setDate(theDate.getDate() + (params['days'] || 0));
+        theDate.setHours(theDate.getHours() + (params['hours'] || 0));
+        theDate.setMinutes(theDate.getMinutes() + (params['minutes'] || 0));
+        theDate.setSeconds(theDate.getSeconds() + (params['seconds'] || 0));
+        return prettyDate(theDate, (params['detailed'] || false))
+    }
+});
src/scripts/js/es6/common/templates/component/Component.js (new file, 34 lines)
@@ -0,0 +1,34 @@
import { ComponentCreatorInterface } from './ComponentCreatorInterface'

const REGISTERED_CREATORS = []

export class Component extends ComponentCreatorInterface {
    static create$listItem(doc) {
        let creator = Component.getCreator(doc);
        return creator.create$listItem(doc);
    }

    static create$item(doc) {
        let creator = Component.getCreator(doc);
        return creator.create$item(doc);
    }

    static canCreate(candidate) {
        // getCreator throws when no creator matches, so translate that into a boolean.
        try {
            return !!Component.getCreator(candidate);
        } catch (error) {
            return false;
        }
    }

    static registerCreator(creator) {
        REGISTERED_CREATORS.push(creator);
    }

    static getCreator(doc) {
        if (doc) {
            for (let candidate of REGISTERED_CREATORS) {
                if (candidate.canCreate(doc)) {
                    return candidate;
                }
            }
        }
        throw 'Can not create component using: ' + JSON.stringify(doc);
    }
}
src/scripts/js/es6/common/templates/component/ComponentCreatorInterface.js (new file, 27 lines)
@@ -0,0 +1,27 @@
export class ComponentCreatorInterface {
    /**
     * @param {JSON} doc
     * @returns {$element}
     */
    static create$listItem(doc) {
        throw 'Not Implemented';
    }

    /**
     * @param {JSON} doc
     * @returns {$element}
     */
    static create$item(doc) {
        throw 'Not Implemented';
    }

    /**
     * @param {JSON} candidate
     * @returns {boolean}
     */
    static canCreate(candidate) {
        throw 'Not Implemented';
    }
}
src/scripts/js/es6/common/templates/init.js (new file, 18 lines)
@@ -0,0 +1,18 @@
import { Nodes } from './nodes/Nodes';
import { Assets } from './nodes/Assets';
import { Posts } from './nodes/Posts';

import { Users } from './users/Users';
import { Component } from './component/Component';

Nodes.registerTemplate('asset', Assets);
Nodes.registerTemplate('post', Posts);

Component.registerCreator(Nodes);
Component.registerCreator(Users);

export {
    Nodes,
    Users,
    Component
};
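The registry above is open: a new document type can be supported without touching Component itself. A minimal sketch, assuming a hypothetical Groups creator for documents that carry a group_name field (the class and field are illustrative, not part of this changeset):

// Hypothetical example, not part of this commit.
import { ComponentCreatorInterface } from './component/ComponentCreatorInterface';
import { Component } from './component/Component';

class Groups extends ComponentCreatorInterface {
    static create$listItem(doc) {
        return $('<div>').addClass('group').text(doc.group_name);
    }

    static canCreate(candidate) {
        // Claim only documents that look like a group.
        return !!candidate.group_name;
    }
}

Component.registerCreator(Groups);
// Component.create$listItem({group_name: 'Admins'}) now dispatches to Groups;
// documents no creator claims still raise 'Can not create component using: ...'.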
src/scripts/js/es6/common/templates/nodes/Assets.js (new file, 45 lines)
@@ -0,0 +1,45 @@
import { NodesBase } from "./NodesBase";
import { thenLoadVideoProgress } from '../utils';

export class Assets extends NodesBase {
    static create$listItem(node) {
        var markIfPublic = true;
        let $card = super.create$listItem(node);
        $card.addClass('asset');

        if (node.properties && node.properties.duration) {
            let $thumbnailContainer = $card.find('.js-thumbnail-container');
            let $cardDuration = $('<div class="card-label right">' + node.properties.duration + '</div>');
            $thumbnailContainer.append($cardDuration);

            /* Video progress and 'watched' label. */
            $(window).trigger('pillar:workStart');
            thenLoadVideoProgress(node._id)
                .fail(console.log)
                .then((view_progress) => {
                    if (!view_progress) return;

                    let $cardProgress = $('<div class="progress rounded-0">');
                    let $cardProgressBar = $('<div class="progress-bar">');
                    $cardProgressBar.css('width', view_progress.progress_in_percent + '%');
                    $cardProgress.append($cardProgressBar);
                    $thumbnailContainer.append($cardProgress);

                    if (view_progress.done) {
                        let card_progress_done = $('<div class="card-label">WATCHED</div>');
                        $thumbnailContainer.append(card_progress_done);
                    }
                })
                .always(function() {
                    $(window).trigger('pillar:workStop');
                });
        }

        /* 'Free' ribbon for public assets. */
        if (markIfPublic && node.permissions && node.permissions.world) {
            $card.addClass('free');
        }

        return $card;
    }
}
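From the way the .then() handler reads the response, the video progress endpoint is expected to return at least the two fields below. A sketch with illustrative values, inferred from the handler rather than from documented API payloads:

// Response shape implied by the handler above:
// GET /api/users/video/<node_id>/progress
let view_progress_example = {
    progress_in_percent: 75, // drives the width of the progress bar
    done: false              // when true, a WATCHED label is appended
};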
src/scripts/js/es6/common/templates/nodes/Nodes.js (new file, 63 lines)
@@ -0,0 +1,63 @@
import { NodesBase } from './NodesBase';
import { ComponentCreatorInterface } from '../component/ComponentCreatorInterface'

let CREATE_NODE_ITEM_MAP = {}

export class Nodes extends ComponentCreatorInterface {
    /**
     * Creates a small list item out of a node document
     * @param {NodeDoc} node mongodb or elastic node document
     */
    static create$listItem(node) {
        let factory = CREATE_NODE_ITEM_MAP[node.node_type] || NodesBase;
        return factory.create$listItem(node);
    }

    /**
     * Creates a full view out of a node document
     * @param {NodeDoc} node mongodb or elastic node document
     */
    static create$item(node) {
        let factory = CREATE_NODE_ITEM_MAP[node.node_type] || NodesBase;
        return factory.create$item(node);
    }

    /**
     * Creates a list of items and a 'Load More' button
     * @param {List} nodes A list of nodes to be created
     * @param {Int} initial Number of nodes to show initially
     * @param {Int} loadNext Number of nodes to show when clicking 'Load More'. If 0, no 'Load More' button is shown
     */
    static createListOf$nodeItems(nodes, initial=8, loadNext=8) {
        let nodesLeftToRender = nodes.slice();
        let nodesToCreate = nodesLeftToRender.splice(0, initial);
        let listOf$items = nodesToCreate.map(Nodes.create$listItem);

        if (loadNext > 0 && nodesLeftToRender.length) {
            let $link = $('<a>')
                .addClass('btn btn-outline-primary px-5 mb-auto btn-block js-load-next')
                .attr('href', 'javascript:void(0);')
                .click((e) => {
                    let $target = $(e.target);
                    $target.replaceWith(Nodes.createListOf$nodeItems(nodesLeftToRender, loadNext, loadNext));
                })
                .text('Load More');

            listOf$items.push($link);
        }
        return listOf$items;
    }

    static canCreate(candidate) {
        return !!candidate.node_type;
    }

    /**
     * Register template classes to handle the construction of different node types
     * @param { String } node_type The node type whose template is registered
     * @param { NodesBase } klass The class to handle the creation of jQuery objects
     */
    static registerTemplate(node_type, klass) {
        CREATE_NODE_ITEM_MAP[node_type] = klass;
    }
}
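A sketch of how a further node type would hook into this map; the 'comment' type and the Comments class are hypothetical, not introduced by this diff:

// Hypothetical: render 'comment' nodes with their own template class.
import { NodesBase } from './NodesBase';
import { Nodes } from './Nodes';

class Comments extends NodesBase {
    static create$item(node) {
        return $('<div>')
            .addClass('comment')
            .text(node.name);
    }
}

Nodes.registerTemplate('comment', Comments);
// Nodes.create$item({node_type: 'comment', name: 'Hi'}) now uses Comments;
// unregistered node types keep falling back to NodesBase.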
src/scripts/js/es6/common/templates/nodes/NodesBase.js (new file, 58 lines)
@@ -0,0 +1,58 @@
import { thenLoadImage, prettyDate } from '../utils';
import { ComponentCreatorInterface } from '../component/ComponentCreatorInterface'

export class NodesBase extends ComponentCreatorInterface {
    static create$listItem(node) {
        let nid = (node._id || node.objectID); // To support both mongo and elastic nodes
        let $card = $('<a class="card node card-image-fade asset">')
            .attr('data-node-id', nid)
            .attr('href', '/nodes/' + nid + '/redir')
            .attr('title', node.name);
        let $thumbnailContainer = $('<div class="card-thumbnail js-thumbnail-container">');

        function warnNoPicture() {
            let $cardIcon = $('<div class="card-img-top card-icon">');
            $cardIcon.html('<i class="pi-' + node.node_type + '">');
            $thumbnailContainer.append($cardIcon);
        }

        if (!node.picture) {
            warnNoPicture();
        }
        else {
            $(window).trigger('pillar:workStart');
            thenLoadImage(node.picture)
                .fail(warnNoPicture)
                .then((imgVariation) => {
                    let img = $('<img class="card-img-top">')
                        .attr('alt', node.name)
                        .attr('src', imgVariation.link)
                        .attr('width', imgVariation.width)
                        .attr('height', imgVariation.height);
                    $thumbnailContainer.append(img);
                })
                .always(function () {
                    $(window).trigger('pillar:workStop');
                });
        }
        $card.append($thumbnailContainer);

        /* Card body for title and meta info. */
        let $cardBody = $('<div class="card-body p-2 d-flex flex-column">');
        let $cardTitle = $('<div class="card-title px-2 mb-2 font-weight-bold">');
        $cardTitle.text(node.name);
        $cardBody.append($cardTitle);

        let $cardMeta = $('<ul class="card-text px-2 list-unstyled d-flex text-black-50 mt-auto">');
        let $cardProject = $('<a class="font-weight-bold pr-2">')
            .attr('href', '/p/' + node.project.url)
            .attr('title', node.project.name)
            .text(node.project.name);
        $cardMeta.append($cardProject);

        let created = node._created || node.created_at; // mongodb + elastic
        $cardMeta.append('<li>' + prettyDate(created) + '</li>');
        $cardBody.append($cardMeta);
        $card.append($cardBody);

        return $card;
    }

    static canCreate(candidate) {
        return !!candidate.node_type;
    }
}
src/scripts/js/es6/common/templates/nodes/Posts.js (new file, 21 lines)
@@ -0,0 +1,21 @@
import { NodesBase } from "./NodesBase";

export class Posts extends NodesBase {
    static create$item(post) {
        let content = [];
        let $title = $('<div>')
            .addClass('h1 text-uppercase mt-4 mb-3')
            .text(post.name);
        content.push($title);
        let $post = $('<div>')
            .addClass('expand-image-links imgs-fluid')
            .append(
                content,
                $('<div>')
                    .addClass('node-details-description')
                    .html(post['properties']['pretty_content'])
            );

        return $post;
    }
}
src/scripts/js/es6/common/templates/users/Users.js (new file, 23 lines)
@@ -0,0 +1,23 @@
import { ComponentCreatorInterface } from '../component/ComponentCreatorInterface'

export class Users extends ComponentCreatorInterface {
    static create$listItem(userDoc) {
        return $('<div>')
            .addClass('users p-2 border-bottom')
            .attr('data-user-id', userDoc._id || userDoc.objectID)
            .append(
                $('<h6>')
                    .addClass('mb-0 font-weight-bold')
                    .text(userDoc.full_name),
                $('<small>')
                    .text(userDoc.username),
                $('<small>')
                    .addClass('d-block roles text-info')
                    .text(userDoc.roles.join(', '))
            )
    }

    static canCreate(candidate) {
        return !!candidate.username;
    }
}
@@ -0,0 +1,46 @@
import { Users } from '../Users'

describe('Users', () => {
    let userDoc;
    describe('create$listItem', () => {
        beforeEach(() => {
            userDoc = {
                _id: 'my-user-id',
                username: 'My User Name',
                full_name: 'My full name',
                roles: ['admin', 'subscriber']
            };
        });
        test('happy case', () => {
            let $user = Users.create$listItem(userDoc);
            expect($user.length).toBe(1);
            expect($user.hasClass('users')).toBeTruthy();
            expect($user.data('user-id')).toBe('my-user-id');

            let $username = $user.find(':contains(My User Name)');
            expect($username.length).toBe(1);

            let $fullName = $user.find(':contains(My full name)');
            expect($fullName.length).toBe(1);

            let $roles = $user.find('.roles');
            expect($roles.length).toBe(1);
            expect($roles.text()).toBe('admin, subscriber')
        });
    })

    describe('create$item', () => {
        beforeEach(() => {
            userDoc = {
                _id: 'my-user-id',
                username: 'My User Name',
                full_name: 'My full name',
                roles: ['admin', 'subscriber']
            };
        });
        test('Not Implemented', () => {
            // Replace with a proper test once create$item is implemented
            expect(() => Users.create$item(userDoc)).toThrow('Not Implemented');
        });
    })
});
src/scripts/js/es6/common/templates/utils.js (new file, 122 lines)
@@ -0,0 +1,122 @@
function thenLoadImage(imgId, size = 'm') {
    return $.get('/api/files/' + imgId)
        .then((resp) => {
            var show_variation = null;
            if (typeof resp.variations != 'undefined') {
                for (var variation of resp.variations) {
                    if (variation.size != size) continue;
                    show_variation = variation;
                    break;
                }
            }

            if (show_variation == null) {
                throw 'Image not found: ' + imgId + ' size: ' + size;
            }
            return show_variation;
        })
}

function thenLoadVideoProgress(nodeId) {
    return $.get('/api/users/video/' + nodeId + '/progress')
}

function prettyDate(time, detail=false) {
    /**
     * time is anything Date can parse, and we return a pretty string like
     * 'an hour ago', 'yesterday', '3 months ago', 'just now', etc.
     */
    let theDate = new Date(time);
    if (!time || isNaN(theDate)) {
        return
    }
    let pretty = '';
    let now = new Date(Date.now()); // Easier to mock Date.now() in tests
    let second_diff = Math.round((now - theDate) / 1000);

    let day_diff = Math.round(second_diff / 86400); // seconds per day (60*60*24)

    if ((day_diff < 0) && (theDate.getFullYear() !== now.getFullYear())) {
        // "Jul 16, 2018"
        pretty = theDate.toLocaleDateString('en-NL', {day: 'numeric', month: 'short', year: 'numeric'});
    }
    else if ((day_diff < -21) && (theDate.getFullYear() == now.getFullYear())) {
        // "Jul 16"
        pretty = theDate.toLocaleDateString('en-NL', {day: 'numeric', month: 'short'});
    }
    else if (day_diff < -7) {
        let week_count = Math.round(-day_diff / 7);
        if (week_count == 1)
            pretty = "in 1 week";
        else
            pretty = "in " + week_count + " weeks";
    }
    else if (day_diff < -1)
        // "next Tuesday"
        pretty = 'next ' + theDate.toLocaleDateString('en-NL', {weekday: 'long'});
    else if (day_diff === 0) {
        if (second_diff < 0) {
            let seconds = Math.abs(second_diff);
            if (seconds < 10)
                return 'just now';
            if (seconds < 60)
                return 'in ' + seconds + 's';
            if (seconds < 120)
                return 'in a minute';
            if (seconds < 3600)
                return 'in ' + Math.round(seconds / 60) + 'm';
            if (seconds < 7200)
                return 'in an hour';
            if (seconds < 86400)
                return 'in ' + Math.round(seconds / 3600) + 'h';
        } else {
            let seconds = second_diff;
            if (seconds < 10)
                return "just now";
            if (seconds < 60)
                return seconds + "s ago";
            if (seconds < 120)
                return "a minute ago";
            if (seconds < 3600)
                return Math.round(seconds / 60) + "m ago";
            if (seconds < 7200)
                return "an hour ago";
            if (seconds < 86400)
                return Math.round(seconds / 3600) + "h ago";
        }
    }
    else if (day_diff == 1)
        pretty = "yesterday";

    else if (day_diff <= 7)
        // "last Tuesday"
        pretty = 'last ' + theDate.toLocaleDateString('en-NL', {weekday: 'long'});

    else if (day_diff <= 22) {
        let week_count = Math.round(day_diff / 7);
        if (week_count == 1)
            pretty = "1 week ago";
        else
            pretty = week_count + " weeks ago";
    }
    else if (theDate.getFullYear() === now.getFullYear())
        // "Jul 16"
        pretty = theDate.toLocaleDateString('en-NL', {day: 'numeric', month: 'short'});

    else
        // "Jul 16, 2009"
        pretty = theDate.toLocaleDateString('en-NL', {day: 'numeric', month: 'short', year: 'numeric'});

    if (detail) {
        // "Tuesday at 04:20"
        let paddedHour = ('00' + theDate.getUTCHours()).substr(-2);
        let paddedMin = ('00' + theDate.getUTCMinutes()).substr(-2);
        return pretty + ' at ' + paddedHour + ':' + paddedMin;
    }

    return pretty;
}

export { thenLoadImage, thenLoadVideoProgress, prettyDate };
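As a quick sanity check, here is prettyDate applied with the mocked clock that the unit tests earlier in this diff use (Tuesday 8 Nov 2016, 11:46:30 UTC); the expected outputs follow from those tests:

// Assuming Date.now() is mocked to Tue 8 Nov 2016 11:46:30 UTC, as in the tests:
prettyDate(new Date(Date.UTC(2016, 10, 8, 11, 46, 25)));        // 'just now'
prettyDate(new Date(Date.UTC(2016, 10, 1, 11, 46, 30)));        // 'last Tuesday'
prettyDate(new Date(Date.UTC(2016, 10, 1, 11, 46, 30)), true);  // 'last Tuesday at 11:46'
prettyDate('my birthday');                                      // undefined (unparseable input)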
src/scripts/js/es6/common/utils/init.js (new file, 1 line)
@@ -0,0 +1 @@
export { transformPlaceholder } from './placeholder'
src/scripts/js/es6/common/utils/placeholder.js (new file, 15 lines)
@@ -0,0 +1,15 @@
/**
 * Fade out a placeholder, then call the callback.
 * Note that the placeholder is neither removed nor kept hidden; the caller
 * decides what to do with it.
 * @param {jQueryObject} $placeholder
 * @param {callback} cb
 */
export function transformPlaceholder($placeholder, cb) {
    $placeholder.addClass('placeholder replaced')
        .delay(250)
        .queue(() => {
            $placeholder.removeClass('placeholder replaced');
            cb();
        })
}
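For illustration, this mirrors how Timeline.js later in this diff swaps a loading placeholder for rendered content; the selector and content here are hypothetical:

// Hypothetical usage: fade out a placeholder, then fill it with real content.
transformPlaceholder($('.js-my-placeholder'), () => {
    $('.js-my-placeholder')
        .empty()
        .append($('<p>').text('Loaded!'));
});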
src/scripts/js/es6/individual/timeline/Timeline.js (new file, 198 lines)
@@ -0,0 +1,198 @@
/**
 * Consumes data in the form:
 * {
 *     groups: [{
 *         label: 'Week 32',
 *         url: null, // optional
 *         groups: [{
 *             label: 'Spring',
 *             url: '/p/spring',
 *             items: {
 *                 post: [nodeDoc, nodeDoc], // primary (fully rendered)
 *                 asset: [nodeDoc, nodeDoc] // secondary (rendered as list item)
 *             },
 *             groups: ...
 *         }]
 *     }],
 *     continue_from: 123456.2 // python timestamp
 * }
 */
const DEFAULT_URL = '/api/timeline';
const transformPlaceholder = pillar.utils.transformPlaceholder;

export class Timeline {
    constructor(target, builder) {
        this._$targetDom = $(target);
        this._url = null;
        this._queryParams = {};
        this._builder = builder;
        this._init();
    }

    _init() {
        this._workStart();
        this._setUrl();
        this._setQueryParams();
        this._thenLoadMore()
            .then((it) => {
                transformPlaceholder(this._$targetDom, () => {
                    this._$targetDom.empty()
                        .append(it);
                    if (this._hasMore()) {
                        let btn = this._create$LoadMoreBtn();
                        this._$targetDom.append(btn);
                    }
                })
            })
            .always(this._workStop.bind(this));
    }

    _setUrl() {
        let projectId = this._$targetDom.data('project-id');
        this._url = DEFAULT_URL;
        if (projectId) {
            this._url += '/p/' + projectId;
        }
    }

    _setQueryParams() {
        let sortDirection = this._$targetDom.data('sort-dir');
        if (sortDirection) {
            this._queryParams['dir'] = sortDirection;
        }
    }

    _loadMore(event) {
        let $spinner = $('<i>').addClass('ml-2 pi-spin spinner');
        let $loadmoreBtn = $(event.target)
            .append($spinner)
            .addClass('disabled');

        this._workStart();
        this._thenLoadMore()
            .then((it) => {
                $loadmoreBtn.before(it);
            })
            .always(() => {
                if (this._hasMore()) {
                    $loadmoreBtn.removeClass('disabled');
                    $spinner.remove();
                } else {
                    $loadmoreBtn.remove();
                }
                this._workStop();
            });
    }

    _hasMore() {
        return !!this._queryParams['from'];
    }

    _thenLoadMore() {
        this._workStart();
        let qParams = $.param(this._queryParams);
        return $.getJSON(this._url + '?' + qParams)
            .then(this._render.bind(this))
            .fail(this._workFailed.bind(this))
            .always(this._workStop.bind(this))
    }

    _render(toRender) {
        this._queryParams['from'] = toRender['continue_from'];
        return toRender['groups']
            .map(this._create$Group.bind(this));
    }

    _create$Group(group) {
        return this._builder.build$Group(0, group);
    }

    _create$LoadMoreBtn() {
        return $('<a>')
            .addClass('btn btn-outline-primary btn-block js-load-next mb-3')
            .attr('href', 'javascript:void(0);')
            .click(this._loadMore.bind(this))
            .text('Show More Weeks');
    }

    _workStart() {
        this._$targetDom.trigger('pillar:workStart');
        return arguments;
    }

    _workStop() {
        this._$targetDom.trigger('pillar:workStop');
        return arguments;
    }

    _workFailed(error) {
        // xhrErrorResponseMessage is assumed to be provided globally by Pillar's scripts.
        let msg = xhrErrorResponseMessage(error);
        this._$targetDom.trigger('pillar:failure', msg);
        return error;
    }
}

class GroupBuilder {
    build$Group(level, group) {
        let content = [];
        let $label = this._create$Label(level, group['label'], group['url']);
        if (group['items']) {
            content = content.concat(this._create$Items(group['items']));
        }
        if (group['groups']) {
            content = content.concat(group['groups'].map(this.build$Group.bind(this, level + 1)));
        }
        return $('<div>')
            .addClass('group')
            .append(
                $label,
                content
            );
    }

    _create$Items(items) {
        let content = [];
        let primaryNodes = items['post'];
        let secondaryNodes = items['asset'];
        if (primaryNodes) {
            content.push(
                $('<div>')
                    .append(primaryNodes.map(pillar.templates.Nodes.create$item))
            );
        }
        if (secondaryNodes) {
            content.push(
                $('<div>')
                    .addClass('card-deck card-padless card-deck-responsive js-asset-list p-3 pb-5 mb-5')
                    .append(pillar.templates.Nodes.createListOf$nodeItems(secondaryNodes))
            );
        }
        return content;
    }

    _create$Label(level, label, url) {
        let type = level == 0 ? 'h6 float-right py-2 group-date' : 'h6 py-2 group-title';
        if (url) {
            return $('<div>')
                .addClass(type + ' sticky-top')
                .append(
                    $('<a>')
                        .attr('href', url)
                        .text(label)
                );
        }
        return $('<div>')
            .addClass(type + ' sticky-top')
            .text(label);
    }
}

$.fn.extend({
    timeline: function() {
        return this.each(function(i, target) {
            new Timeline(target,
                new GroupBuilder()
            );
        });
    }
})
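The plugin reads its configuration from data attributes on the target element (see _setUrl and _setQueryParams above). A minimal sketch of wiring it up; the attribute values are illustrative:

// Hypothetical setup: Timeline reads its config from data attributes on the target.
let $target = $('<div class="timeline">')
    .attr('data-project-id', 'my-project-id') // optional: scopes the feed to /api/timeline/p/<id>
    .attr('data-sort-dir', 'asc')             // optional: forwarded as the 'dir' query parameter
    .appendTo('body');
$target.timeline(); // or rely on the document-ready hook in the init file that follows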
src/scripts/js/es6/individual/timeline/init.js (new file, 7 lines)
@@ -0,0 +1,7 @@
export { Timeline } from './Timeline';

// Init timelines on document ready
$(function() {
    $(".timeline")
        .timeline();
})
src/scripts/js/es6/test_config/test-env.js (new file, 2 lines)
@@ -0,0 +1,2 @@
import $ from 'jquery';
global.$ = global.jQuery = $;
File diff suppressed because it is too large
@@ -1,116 +0,0 @@
(function () {
    var output, Converter;
    if (typeof exports === "object" && typeof require === "function") { // we're in a CommonJS (e.g. Node.js) module
        output = exports;
        Converter = require("./Markdown.Converter").Converter;
    } else {
        output = window.Markdown;
        Converter = output.Converter;
    }

    output.getSanitizingConverter = function () {
        var converter = new Converter();
        converter.hooks.chain("postConversion", sanitizeHtml);
        converter.hooks.chain("postConversion", balanceTags);
        return converter;
    }

    function sanitizeHtml(html) {
        return html.replace(/<[^>]*>?/gi, sanitizeTag);
    }

    // (tags that can be opened/closed) | (tags that stand alone)
    var basic_tag_whitelist = /^(<\/?(b|blockquote|code|del|dd|dl|dt|em|h1|h2|h3|i|iframe|kbd|li|ol(?: start="\d+")?|p|pre|s|sup|sub|strong|strike|ul|video)>|<(br|hr)\s?\/?>)$/i;
    // <a href="url..." optional title>|</a>
    var a_white = /^(<a\shref="((https?|ftp):\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)*[\]$]+"(\stitle="[^"<>]+")?(\sclass="[^"<>]+")?\s?>|<\/a>)$/i;

    // Cloud custom: Allow iframe embed from YouTube, Vimeo and SoundCloud
    var iframe_youtube = /^(<iframe(\swidth="\d{1,3}")?(\sheight="\d{1,3}")\ssrc="((https?):\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)*[\]$]+"(\sframeborder="\d{1,3}")?(\sallowfullscreen)\s?>|<\/iframe>)$/i;
    var iframe_vimeo = /^(<iframe(\ssrc="((https?):\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)*[\]$]+"?\swidth="\d{1,3}")?(\sheight="\d{1,3}")?(\sframeborder="\d{1,3}")?(\swebkitallowfullscreen)\s?(\smozallowfullscreen)\s?(\sallowfullscreen)\s?>|<\/iframe>)$/i;
    var iframe_soundcloud = /^(<iframe(\swidth="\d{1,3}\%")?(\sheight="\d{1,3}")?(\sscrolling="(?:yes|no)")?(\sframeborder="(?:yes|no)")\ssrc="((https?):\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)*[\]$]+"\s?>|<\/iframe>)$/i;
    var iframe_googlestorage = /^(<iframe\ssrc="https:\/\/storage.googleapis.com\/institute-storage\/.+"\sstyle=".*"\s?>|<\/iframe>)$/i;

    // <img src="url..." optional width optional height optional alt optional title
    var img_white = /^(<img\ssrc="(https?:\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)*[\]$]+"(\swidth="\d{1,3}")?(\sheight="\d{1,3}")?(\salt="[^"<>]*")?(\stitle="[^"<>]*")?\s?\/?>)$/i;
    var video_white = /<video(.*?)>/;

    function sanitizeTag(tag) {
        if (tag.match(basic_tag_whitelist) || tag.match(a_white) || tag.match(img_white) || tag.match(iframe_youtube) || tag.match(iframe_vimeo) || tag.match(iframe_soundcloud) || tag.match(iframe_googlestorage) || tag.match(video_white)) {
            return tag;
        } else {
            return "";
        }
    }

    /// <summary>
    /// attempt to balance HTML tags in the html string
    /// by removing any unmatched opening or closing tags
    /// IMPORTANT: we *assume* HTML has *already* been
    /// sanitized and is safe/sane before balancing!
    ///
    /// adapted from CODESNIPPET: A8591DBA-D1D3-11DE-947C-BA5556D89593
    /// </summary>
    function balanceTags(html) {

        if (html == "")
            return "";

        var re = /<\/?\w+[^>]*(\s|$|>)/g;
        // convert everything to lower case; this makes
        // our case insensitive comparisons easier
        var tags = html.toLowerCase().match(re);

        // no HTML tags present? nothing to do; exit now
        var tagcount = (tags || []).length;
        if (tagcount == 0)
            return html;

        var tagname, tag;
        var ignoredtags = "<p><img><br><li><hr>";
        var match;
        var tagpaired = [];
        var tagremove = [];
        var needsRemoval = false;

        // loop through matched tags in forward order
        for (var ctag = 0; ctag < tagcount; ctag++) {
            tagname = tags[ctag].replace(/<\/?(\w+).*/, "$1");
            // skip any already paired tags
            // and skip tags in our ignore list; assume they're self-closed
            if (tagpaired[ctag] || ignoredtags.search("<" + tagname + ">") > -1)
                continue;

            tag = tags[ctag];
            match = -1;

            if (!/^<\//.test(tag)) {
                // this is an opening tag
                // search forwards (next tags), look for closing tags
                for (var ntag = ctag + 1; ntag < tagcount; ntag++) {
                    if (!tagpaired[ntag] && tags[ntag] == "</" + tagname + ">") {
                        match = ntag;
                        break;
                    }
                }
            }

            if (match == -1)
                needsRemoval = tagremove[ctag] = true; // mark for removal
            else
                tagpaired[match] = true; // mark paired
        }

        if (!needsRemoval)
            return html;

        // delete all orphaned tags from the string

        var ctag = 0;
        html = html.replace(re, function (match) {
            var res = tagremove[ctag] ? "" : match;
            ctag++;
            return res;
        });
        return html;
    }
})();
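Although this file is deleted by this changeset, the behaviour it implemented is worth recording. The functions are private to the IIFE, so this is a conceptual sketch only; the outputs are inferred from the regexes above:

// sanitizeHtml('<b>bold</b> <script>alert(1)</script>')
//   -> '<b>bold</b> alert(1)'   (script tags are not whitelisted, so they are stripped)
// balanceTags('<em>unclosed')
//   -> 'unclosed'               (the orphaned <em> opening tag is removed)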
File diff suppressed because it is too large
@@ -1,874 +0,0 @@
(function () {
    // A quick way to make sure we're only keeping span-level tags when we need to.
    // This isn't supposed to be foolproof. It's just a quick way to make sure we
    // keep all span-level tags returned by a pagedown converter. It should allow
    // all span-level tags through, with or without attributes.
    var inlineTags = new RegExp(['^(<\\/?(a|abbr|acronym|applet|area|b|basefont|',
                                 'bdo|big|button|cite|code|del|dfn|em|figcaption|',
                                 'font|i|iframe|img|input|ins|kbd|label|map|',
                                 'mark|meter|object|param|progress|q|ruby|rp|rt|s|',
                                 'samp|script|select|small|span|strike|strong|',
                                 'sub|sup|textarea|time|tt|u|var|wbr)[^>]*>|',
                                 '<(br)\\s?\\/?>)$'].join(''), 'i');

    /******************************************************************
     * Utility Functions                                              *
     *****************************************************************/

    // patch for ie7
    if (!Array.indexOf) {
        Array.prototype.indexOf = function(obj) {
            for (var i = 0; i < this.length; i++) {
                if (this[i] == obj) {
                    return i;
                }
            }
            return -1;
        };
    }

    function trim(str) {
        return str.replace(/^\s+|\s+$/g, '');
    }

    function rtrim(str) {
        return str.replace(/\s+$/g, '');
    }

    // Remove one level of indentation from text. Indent is 4 spaces.
    function outdent(text) {
        return text.replace(new RegExp('^(\\t|[ ]{1,4})', 'gm'), '');
    }

    function contains(str, substr) {
        return str.indexOf(substr) != -1;
    }

    // Sanitize html, removing tags that aren't in the whitelist
    function sanitizeHtml(html, whitelist) {
        return html.replace(/<[^>]*>?/gi, function(tag) {
            return tag.match(whitelist) ? tag : '';
        });
    }

    // Merge two arrays, keeping only unique elements.
    function union(x, y) {
        var obj = {};
        for (var i = 0; i < x.length; i++)
            obj[x[i]] = x[i];
        for (i = 0; i < y.length; i++)
            obj[y[i]] = y[i];
        var res = [];
        for (var k in obj) {
            if (obj.hasOwnProperty(k))
                res.push(obj[k]);
        }
        return res;
    }

    // JS regexes don't support \A or \Z, so we add sentinels, as Pagedown
    // does. In this case, we add the ascii codes for start of text (STX) and
    // end of text (ETX), an idea borrowed from:
    // https://github.com/tanakahisateru/js-markdown-extra
    function addAnchors(text) {
        if(text.charAt(0) != '\x02')
            text = '\x02' + text;
        if(text.charAt(text.length - 1) != '\x03')
            text = text + '\x03';
        return text;
    }

    // Remove STX and ETX sentinels.
    function removeAnchors(text) {
        if(text.charAt(0) == '\x02')
            text = text.substr(1);
        if(text.charAt(text.length - 1) == '\x03')
            text = text.substr(0, text.length - 1);
        return text;
    }

    // Convert markdown within an element, retaining only span-level tags
    function convertSpans(text, extra) {
        return sanitizeHtml(convertAll(text, extra), inlineTags);
    }

    // Convert internal markdown using the stock pagedown converter
    function convertAll(text, extra) {
        var result = extra.blockGamutHookCallback(text);
        // We need to perform these operations since we skip the steps in the converter
        result = unescapeSpecialChars(result);
        result = result.replace(/~D/g, "$$").replace(/~T/g, "~");
        result = extra.previousPostConversion(result);
        return result;
    }

    // Convert escaped special characters
    function processEscapesStep1(text) {
        // Markdown extra adds two escapable characters, `:` and `|`
        return text.replace(/\\\|/g, '~I').replace(/\\:/g, '~i');
    }
    function processEscapesStep2(text) {
        return text.replace(/~I/g, '|').replace(/~i/g, ':');
    }

    // Duplicated from PageDown converter
    function unescapeSpecialChars(text) {
        // Swap back in all the special characters we've hidden.
        text = text.replace(/~E(\d+)E/g, function(wholeMatch, m1) {
            var charCodeToReplace = parseInt(m1);
            return String.fromCharCode(charCodeToReplace);
        });
        return text;
    }

    function slugify(text) {
        return text.toLowerCase()
            .replace(/\s+/g, '-')     // Replace spaces with -
            .replace(/[^\w\-]+/g, '') // Remove all non-word chars
            .replace(/\-\-+/g, '-')   // Replace multiple - with single -
            .replace(/^-+/, '')       // Trim - from start of text
            .replace(/-+$/, '');      // Trim - from end of text
    }

    /*****************************************************************************
     * Markdown.Extra                                                            *
     ****************************************************************************/

    Markdown.Extra = function() {
        // For converting internal markdown (in tables for instance).
        // This is necessary since these methods are meant to be called as
        // preConversion hooks, and the Markdown converter passed to init()
        // won't convert any markdown contained in the html tags we return.
        this.converter = null;

        // Stores html blocks we generate in hooks so that
        // they're not destroyed if the user is using a sanitizing converter
        this.hashBlocks = [];

        // Stores footnotes
        this.footnotes = {};
        this.usedFootnotes = [];

        // Special attribute blocks for fenced code blocks and headers enabled.
        this.attributeBlocks = false;

        // Fenced code block options
        this.googleCodePrettify = false;
        this.highlightJs = false;

        // Table options
        this.tableClass = '';

        this.tabWidth = 4;
    };

    Markdown.Extra.init = function(converter, options) {
        // Each call to init creates a new instance of Markdown.Extra so it's
        // safe to have multiple converters, with different options, on a single page
        var extra = new Markdown.Extra();
        var postNormalizationTransformations = [];
        var preBlockGamutTransformations = [];
        var postSpanGamutTransformations = [];
        var postConversionTransformations = ["unHashExtraBlocks"];

        options = options || {};
        options.extensions = options.extensions || ["all"];
        if (contains(options.extensions, "all")) {
            options.extensions = ["tables", "fenced_code_gfm", "def_list", "attr_list", "footnotes", "smartypants", "strikethrough", "newlines"];
        }
        preBlockGamutTransformations.push("wrapHeaders");
        if (contains(options.extensions, "attr_list")) {
            postNormalizationTransformations.push("hashFcbAttributeBlocks");
            preBlockGamutTransformations.push("hashHeaderAttributeBlocks");
            postConversionTransformations.push("applyAttributeBlocks");
            extra.attributeBlocks = true;
        }
        if (contains(options.extensions, "fenced_code_gfm")) {
            // This step will convert fcb inside list items and blockquotes
            preBlockGamutTransformations.push("fencedCodeBlocks");
            // This extra step is to prevent html blocks hashing and link definition/footnotes stripping inside fcb
            postNormalizationTransformations.push("fencedCodeBlocks");
        }
        if (contains(options.extensions, "tables")) {
            preBlockGamutTransformations.push("tables");
        }
        if (contains(options.extensions, "def_list")) {
            preBlockGamutTransformations.push("definitionLists");
        }
        if (contains(options.extensions, "footnotes")) {
            postNormalizationTransformations.push("stripFootnoteDefinitions");
            preBlockGamutTransformations.push("doFootnotes");
            postConversionTransformations.push("printFootnotes");
        }
        if (contains(options.extensions, "smartypants")) {
            postConversionTransformations.push("runSmartyPants");
        }
        if (contains(options.extensions, "strikethrough")) {
            postSpanGamutTransformations.push("strikethrough");
        }
        if (contains(options.extensions, "newlines")) {
            postSpanGamutTransformations.push("newlines");
        }

        converter.hooks.chain("postNormalization", function(text) {
            return extra.doTransform(postNormalizationTransformations, text) + '\n';
        });

        converter.hooks.chain("preBlockGamut", function(text, blockGamutHookCallback) {
            // Keep a reference to the block gamut callback to run recursively
            extra.blockGamutHookCallback = blockGamutHookCallback;
            text = processEscapesStep1(text);
            text = extra.doTransform(preBlockGamutTransformations, text) + '\n';
            text = processEscapesStep2(text);
            return text;
        });

        converter.hooks.chain("postSpanGamut", function(text) {
            return extra.doTransform(postSpanGamutTransformations, text);
        });

        // Keep a reference to the hook chain running before doPostConversion to apply on hashed extra blocks
        extra.previousPostConversion = converter.hooks.postConversion;
        converter.hooks.chain("postConversion", function(text) {
            text = extra.doTransform(postConversionTransformations, text);
            // Clear state vars that may use unnecessary memory
            extra.hashBlocks = [];
            extra.footnotes = {};
            extra.usedFootnotes = [];
            return text;
        });

        if ("highlighter" in options) {
            extra.googleCodePrettify = options.highlighter === 'prettify';
            extra.highlightJs = options.highlighter === 'highlight';
        }

        if ("table_class" in options) {
            extra.tableClass = options.table_class;
        }

        extra.converter = converter;

        // Caller usually won't need this, but it's handy for testing.
        return extra;
    };

    // Do transformations
    Markdown.Extra.prototype.doTransform = function(transformations, text) {
        for(var i = 0; i < transformations.length; i++)
            text = this[transformations[i]](text);
        return text;
    };

    // Return a placeholder containing a key, which is the block's index in the
    // hashBlocks array. We wrap our output in a <p> tag here so Pagedown won't.
    Markdown.Extra.prototype.hashExtraBlock = function(block) {
        return '\n<p>~X' + (this.hashBlocks.push(block) - 1) + 'X</p>\n';
    };
    Markdown.Extra.prototype.hashExtraInline = function(block) {
        return '~X' + (this.hashBlocks.push(block) - 1) + 'X';
    };

    // Replace placeholder blocks in `text` with their corresponding
    // html blocks in the hashBlocks array.
    Markdown.Extra.prototype.unHashExtraBlocks = function(text) {
        var self = this;
        function recursiveUnHash() {
            var hasHash = false;
            text = text.replace(/(?:<p>)?~X(\d+)X(?:<\/p>)?/g, function(wholeMatch, m1) {
                hasHash = true;
                var key = parseInt(m1, 10);
                return self.hashBlocks[key];
            });
            if(hasHash === true) {
                recursiveUnHash();
            }
        }
        recursiveUnHash();
        return text;
    };

    // Wrap headers to make sure they won't be in def lists
    Markdown.Extra.prototype.wrapHeaders = function(text) {
        function wrap(text) {
            return '\n' + text + '\n';
        }
        text = text.replace(/^.+[ \t]*\n=+[ \t]*\n+/gm, wrap);
        text = text.replace(/^.+[ \t]*\n-+[ \t]*\n+/gm, wrap);
        text = text.replace(/^\#{1,6}[ \t]*.+?[ \t]*\#*\n+/gm, wrap);
        return text;
    };

    /******************************************************************
     * Attribute Blocks                                               *
     *****************************************************************/

    // TODO: use sentinels. Should we just add/remove them in doConversion?
    // TODO: better matches for id / class attributes
    var attrBlock = "\\{[ \\t]*((?:[#.][-_:a-zA-Z0-9]+[ \\t]*)+)\\}";
    var hdrAttributesA = new RegExp("^(#{1,6}.*#{0,6})[ \\t]+" + attrBlock + "[ \\t]*(?:\\n|0x03)", "gm");
    var hdrAttributesB = new RegExp("^(.*)[ \\t]+" + attrBlock + "[ \\t]*\\n" +
                                    "(?=[\\-|=]+\\s*(?:\\n|0x03))", "gm"); // underline lookahead
    var fcbAttributes = new RegExp("^(```[^`\\n]*)[ \\t]+" + attrBlock + "[ \\t]*\\n" +
                                   "(?=([\\s\\S]*?)\\n```[ \\t]*(\\n|0x03))", "gm");

    // Extract headers attribute blocks, move them above the element they will be
    // applied to, and hash them for later.
    Markdown.Extra.prototype.hashHeaderAttributeBlocks = function(text) {

        var self = this;
        function attributeCallback(wholeMatch, pre, attr) {
            return '<p>~XX' + (self.hashBlocks.push(attr) - 1) + 'XX</p>\n' + pre + "\n";
        }

        text = text.replace(hdrAttributesA, attributeCallback); // ## headers
        text = text.replace(hdrAttributesB, attributeCallback); // underline headers
        return text;
    };

    // Extract FCB attribute blocks, move them above the element they will be
    // applied to, and hash them for later.
    Markdown.Extra.prototype.hashFcbAttributeBlocks = function(text) {
        // TODO: use sentinels. Should we just add/remove them in doConversion?
        // TODO: better matches for id / class attributes

        var self = this;
        function attributeCallback(wholeMatch, pre, attr) {
            return '<p>~XX' + (self.hashBlocks.push(attr) - 1) + 'XX</p>\n' + pre + "\n";
        }

        return text.replace(fcbAttributes, attributeCallback);
    };

    Markdown.Extra.prototype.applyAttributeBlocks = function(text) {
        var self = this;
        var blockRe = new RegExp('<p>~XX(\\d+)XX</p>[\\s]*' +
                                 '(?:<(h[1-6]|pre)(?: +class="(\\S+)")?(>[\\s\\S]*?</\\2>))', "gm");
        text = text.replace(blockRe, function(wholeMatch, k, tag, cls, rest) {
            if (!tag) // no following header or fenced code block.
                return '';

            // get attributes list from hash
            var key = parseInt(k, 10);
            var attributes = self.hashBlocks[key];

            // get id
            var id = attributes.match(/#[^\s#.]+/g) || [];
            var idStr = id[0] ? ' id="' + id[0].substr(1, id[0].length - 1) + '"' : '';

            // get classes and merge with existing classes
            var classes = attributes.match(/\.[^\s#.]+/g) || [];
            for (var i = 0; i < classes.length; i++) // Remove leading dot
                classes[i] = classes[i].substr(1, classes[i].length - 1);

            var classStr = '';
            if (cls)
                classes = union(classes, [cls]);

            if (classes.length > 0)
                classStr = ' class="' + classes.join(' ') + '"';

            return "<" + tag + idStr + classStr + rest;
        });

        return text;
    };

    /******************************************************************
     * Tables                                                         *
     *****************************************************************/

    // Find and convert Markdown Extra tables into html.
    Markdown.Extra.prototype.tables = function(text) {
        var self = this;

        var leadingPipe = new RegExp(
            ['^'                         ,
             '[ ]{0,3}'                  , // Allowed whitespace
             '[|]'                       , // Initial pipe
             '(.+)\\n'                   , // $1: Header Row

             '[ ]{0,3}'                  , // Allowed whitespace
             '[|]([ ]*[-:]+[-| :]*)\\n'  , // $2: Separator

             '('                         , // $3: Table Body
             '(?:[ ]*[|].*\\n?)*'        , // Table rows
             ')',
             '(?:\\n|$)'                   // Stop at final newline
            ].join(''),
            'gm'
        );

        var noLeadingPipe = new RegExp(
            ['^'                         ,
             '[ ]{0,3}'                  , // Allowed whitespace
             '(\\S.*[|].*)\\n'           , // $1: Header Row

             '[ ]{0,3}'                  , // Allowed whitespace
             '([-:]+[ ]*[|][-| :]*)\\n'  , // $2: Separator

             '('                         , // $3: Table Body
             '(?:.*[|].*\\n?)*'          , // Table rows
             ')'                         ,
             '(?:\\n|$)'                   // Stop at final newline
            ].join(''),
            'gm'
        );

        text = text.replace(leadingPipe, doTable);
        text = text.replace(noLeadingPipe, doTable);

        // $1 = header, $2 = separator, $3 = body
        function doTable(match, header, separator, body, offset, string) {
            // remove any leading pipes and whitespace
            header = header.replace(/^ *[|]/m, '');
            separator = separator.replace(/^ *[|]/m, '');
            body = body.replace(/^ *[|]/gm, '');

            // remove trailing pipes and whitespace
            header = header.replace(/[|] *$/m, '');
            separator = separator.replace(/[|] *$/m, '');
            body = body.replace(/[|] *$/gm, '');

            // determine column alignments
            var alignspecs = separator.split(/ *[|] */);
            var align = [];
            for (var i = 0; i < alignspecs.length; i++) {
                var spec = alignspecs[i];
                if (spec.match(/^ *-+: *$/m))
                    align[i] = ' align="right"';
                else if (spec.match(/^ *:-+: *$/m))
                    align[i] = ' align="center"';
                else if (spec.match(/^ *:-+ *$/m))
                    align[i] = ' align="left"';
                else align[i] = '';
            }

            // TODO: parse spans in header and rows before splitting, so that pipes
            // inside of tags are not interpreted as separators
            var headers = header.split(/ *[|] */);
            var colCount = headers.length;

            // build html
            var cls = self.tableClass ? ' class="' + self.tableClass + '"' : '';
            var html = ['<table', cls, '>\n', '<thead>\n', '<tr>\n'].join('');

            // build column headers.
            for (i = 0; i < colCount; i++) {
                var headerHtml = convertSpans(trim(headers[i]), self);
                html += ["  <th", align[i], ">", headerHtml, "</th>\n"].join('');
            }
            html += "</tr>\n</thead>\n";

            // build rows
            var rows = body.split('\n');
            for (i = 0; i < rows.length; i++) {
                if (rows[i].match(/^\s*$/)) // can apply to final row
                    continue;

                // ensure number of rowCells matches colCount
                var rowCells = rows[i].split(/ *[|] */);
                var lenDiff = colCount - rowCells.length;
                for (var j = 0; j < lenDiff; j++)
                    rowCells.push('');

                html += "<tr>\n";
                for (j = 0; j < colCount; j++) {
                    var colHtml = convertSpans(trim(rowCells[j]), self);
                    html += ["  <td", align[j], ">", colHtml, "</td>\n"].join('');
                }
                html += "</tr>\n";
            }

            html += "</table>\n";

            // replace html with placeholder until postConversion step
            return self.hashExtraBlock(html);
        }

        return text;
    };

    /******************************************************************
     * Footnotes                                                      *
     *****************************************************************/

    // Strip footnote, store in hashes.
    Markdown.Extra.prototype.stripFootnoteDefinitions = function(text) {
        var self = this;

        text = text.replace(
            /\n[ ]{0,3}\[\^(.+?)\]\:[ \t]*\n?([\s\S]*?)\n{1,2}((?=\n[ ]{0,3}\S)|$)/g,
            function(wholeMatch, m1, m2) {
                m1 = slugify(m1);
                m2 += "\n";
                m2 = m2.replace(/^[ ]{0,3}/g, "");
                self.footnotes[m1] = m2;
                return "\n";
            });

        return text;
    };

    // Find and convert footnotes references.
    Markdown.Extra.prototype.doFootnotes = function(text) {
        var self = this;
        if(self.isConvertingFootnote === true) {
            return text;
        }

        var footnoteCounter = 0;
        text = text.replace(/\[\^(.+?)\]/g, function(wholeMatch, m1) {
            var id = slugify(m1);
            var footnote = self.footnotes[id];
            if (footnote === undefined) {
                return wholeMatch;
            }
            footnoteCounter++;
            self.usedFootnotes.push(id);
            var html = '<a href="#fn:' + id + '" id="fnref:' + id
                + '" title="See footnote" class="footnote">' + footnoteCounter
                + '</a>';
            return self.hashExtraInline(html);
        });

        return text;
    };

    // Print footnotes at the end of the document
    Markdown.Extra.prototype.printFootnotes = function(text) {
        var self = this;

        if (self.usedFootnotes.length === 0) {
            return text;
        }

        text += '\n\n<div class="footnotes">\n<hr>\n<ol>\n\n';
        for(var i = 0; i < self.usedFootnotes.length; i++) {
            var id = self.usedFootnotes[i];
            var footnote = self.footnotes[id];
            self.isConvertingFootnote = true;
            var formattedfootnote = convertSpans(footnote, self);
            delete self.isConvertingFootnote;
            text += '<li id="fn:'
                + id
                + '">'
                + formattedfootnote
                + ' <a href="#fnref:'
                + id
                + '" title="Return to article" class="reversefootnote">&#8617;</a></li>\n\n';
        }
        text += '</ol>\n</div>';
        return text;
    };

    /******************************************************************
     * Fenced Code Blocks (gfm)                                       *
     ******************************************************************/

    // Find and convert gfm-inspired fenced code blocks into html.
    Markdown.Extra.prototype.fencedCodeBlocks = function(text) {
        function encodeCode(code) {
            code = code.replace(/&/g, "&amp;");
            code = code.replace(/</g, "&lt;");
            code = code.replace(/>/g, "&gt;");
            // These were escaped by PageDown before postNormalization
            code = code.replace(/~D/g, "$$");
            code = code.replace(/~T/g, "~");
            return code;
        }

        var self = this;
        text = text.replace(/(?:^|\n)```([^`\n]*)\n([\s\S]*?)\n```[ \t]*(?=\n)/g, function(match, m1, m2) {
            var language = trim(m1), codeblock = m2;

            // adhere to specified options
            var preclass = self.googleCodePrettify ? ' class="prettyprint"' : '';
            var codeclass = '';
            if (language) {
                if (self.googleCodePrettify || self.highlightJs) {
                    // use html5 language- class names. supported by both prettify and highlight.js
                    codeclass = ' class="language-' + language + '"';
                } else {
                    codeclass = ' class="' + language + '"';
                }
            }

            var html = ['<pre', preclass, '><code', codeclass, '>',
                        encodeCode(codeblock), '</code></pre>'].join('');

            // replace codeblock with placeholder until postConversion step
            return self.hashExtraBlock(html);
        });

        return text;
    };

    /******************************************************************
     * SmartyPants                                                    *
     ******************************************************************/

    Markdown.Extra.prototype.educatePants = function(text) {
        var self = this;
        var result = '';
        var blockOffset = 0;
        // Here we parse HTML in a very bad manner
        text.replace(/(?:<!--[\s\S]*?-->)|(<)([a-zA-Z1-6]+)([^\n]*?>)([\s\S]*?)(<\/\2>)/g, function(wholeMatch, m1, m2, m3, m4, m5, offset) {
            var token = text.substring(blockOffset, offset);
            result += self.applyPants(token);
            self.smartyPantsLastChar = result.substring(result.length - 1);
            blockOffset = offset + wholeMatch.length;
            if(!m1) {
                // Skip commentary
                result += wholeMatch;
                return;
            }
            // Skip special tags
            if(!/code|kbd|pre|script|noscript|iframe|math|ins|del|pre/i.test(m2)) {
                m4 = self.educatePants(m4);
            }
            else {
                self.smartyPantsLastChar = m4.substring(m4.length - 1);
            }
            result += m1 + m2 + m3 + m4 + m5;
        });
        var lastToken = text.substring(blockOffset);
        result += self.applyPants(lastToken);
        self.smartyPantsLastChar = result.substring(result.length - 1);
        return result;
    };

    function revertPants(wholeMatch, m1) {
        var blockText = m1;
        blockText = blockText.replace(/&\#8220;/g, "\"");
        blockText = blockText.replace(/&\#8221;/g, "\"");
        blockText = blockText.replace(/&\#8216;/g, "'");
        blockText = blockText.replace(/&\#8217;/g, "'");
        blockText = blockText.replace(/&\#8212;/g, "---");
        blockText = blockText.replace(/&\#8211;/g, "--");
        blockText = blockText.replace(/&\#8230;/g, "...");
        return blockText;
    }

    Markdown.Extra.prototype.applyPants = function(text) {
        // Dashes
        text = text.replace(/---/g, "&#8212;").replace(/--/g, "&#8211;");
        // Ellipses
        text = text.replace(/\.\.\./g, "&#8230;").replace(/\.\s\.\s\./g, "&#8230;");
        // Backticks
        text = text.replace(/``/g, "&#8220;").replace(/''/g, "&#8221;");

        if(/^'$/.test(text)) {
            // Special case: single-character ' token
            if(/\S/.test(this.smartyPantsLastChar)) {
                return "&#8217;";
            }
            return "&#8216;";
        }
        if(/^"$/.test(text)) {
            // Special case: single-character " token
            if(/\S/.test(this.smartyPantsLastChar)) {
                return "&#8221;";
            }
            return "&#8220;";
        }

        // Special case if the very first character is a quote
        // followed by punctuation at a non-word-break. Close the quotes by brute force:
        text = text.replace(/^'(?=[!"#\$\%'()*+,\-.\/:;<=>?\@\[\\\]\^_`{|}~]\B)/, "&#8217;");
        text = text.replace(/^"(?=[!"#\$\%'()*+,\-.\/:;<=>?\@\[\\\]\^_`{|}~]\B)/, "&#8221;");

        // Special case for double sets of quotes, e.g.:
        //   <p>He said, "'Quoted' words in a larger quote."</p>
        text = text.replace(/"'(?=\w)/g, "&#8220;&#8216;");
        text = text.replace(/'"(?=\w)/g, "&#8216;&#8220;");

        // Special case for decade abbreviations (the '80s):
        text = text.replace(/'(?=\d{2}s)/g, "&#8217;");

        // Get most opening single quotes:
        text = text.replace(/(\s|&nbsp;|--|&[mn]dash;|&\#8211;|&\#8212;|&\#x201[34];)'(?=\w)/g, "$1&#8216;");

        // Single closing quotes:
        text = text.replace(/([^\s\[\{\(\-])'/g, "$1&#8217;");
        text = text.replace(/'(?=\s|s\b)/g, "&#8217;");

        // Any remaining single quotes should be opening ones:
        text = text.replace(/'/g, "&#8216;");

        // Get most opening double quotes:
        text = text.replace(/(\s|&nbsp;|--|&[mn]dash;|&\#8211;|&\#8212;|&\#x201[34];)"(?=\w)/g, "$1&#8220;");

        // Double closing quotes:
        text = text.replace(/([^\s\[\{\(\-])"/g, "$1&#8221;");
        text = text.replace(/"(?=\s)/g, "&#8221;");

        // Any remaining quotes should be opening ones.
        text = text.replace(/"/ig, "&#8220;");
        return text;
    };

    // Run SmartyPants over the text, then revert the changes made inside html tags.
    Markdown.Extra.prototype.runSmartyPants = function(text) {
        this.smartyPantsLastChar = '';
        text = this.educatePants(text);
        // Clean everything inside html tags (some of them may have been converted due to our rough html parsing)
        text = text.replace(/(<([a-zA-Z1-6]+)\b([^\n>]*?)(\/)?>)/g, revertPants);
        return text;
    };
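A sketch of what the replacements above do to a single token. applyPants is reachable via the prototype, so this is runnable wherever the Markdown global exists, though the exact entity output is inferred from the code rather than documented:

// Inferred behaviour of the SmartyPants pass:
var extra = new Markdown.Extra();
extra.smartyPantsLastChar = '';
extra.applyPants("Wait -- it's \"done\"...");
// -> 'Wait &#8211; it&#8217;s &#8220;done&#8221;&#8230;'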
||||
|
||||
/******************************************************************
|
||||
* Definition Lists *
|
||||
******************************************************************/
|
||||
|
||||
// Find and convert markdown extra definition lists into html.
|
||||
Markdown.Extra.prototype.definitionLists = function(text) {
|
||||
var wholeList = new RegExp(
|
||||
['(\\x02\\n?|\\n\\n)' ,
|
||||
'(?:' ,
|
||||
'(' , // $1 = whole list
|
||||
'(' , // $2
|
||||
'[ ]{0,3}' ,
|
||||
'((?:[ \\t]*\\S.*\\n)+)', // $3 = defined term
|
||||
'\\n?' ,
|
||||
'[ ]{0,3}:[ ]+' , // colon starting definition
|
||||
')' ,
|
||||
'([\\s\\S]+?)' ,
|
||||
'(' , // $4
|
||||
'(?=\\0x03)' , // \z
|
||||
'|' ,
|
||||
'(?=' ,
|
||||
'\\n{2,}' ,
|
||||
'(?=\\S)' ,
|
||||
'(?!' , // Negative lookahead for another term
|
||||
'[ ]{0,3}' ,
|
||||
'(?:\\S.*\\n)+?' , // defined term
|
||||
'\\n?' ,
|
||||
'[ ]{0,3}:[ ]+' , // colon starting definition
|
||||
')' ,
|
||||
'(?!' , // Negative lookahead for another definition
|
||||
'[ ]{0,3}:[ ]+' , // colon starting definition
|
||||
')' ,
|
||||
')' ,
|
||||
')' ,
|
||||
')' ,
|
||||
')'
|
||||
].join(''),
|
||||
'gm'
|
||||
);
|
||||
|
||||
var self = this;
|
||||
text = addAnchors(text);
|
||||
|
||||
text = text.replace(wholeList, function(match, pre, list) {
|
||||
var result = trim(self.processDefListItems(list));
|
||||
result = "<dl>\n" + result + "\n</dl>";
|
||||
return pre + self.hashExtraBlock(result) + "\n\n";
|
||||
});
|
||||
|
||||
return removeAnchors(text);
|
||||
};
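  // Illustrative input/output for this transform (whitespace simplified):
  //
  //   Term
  //   : first definition
  //   : second definition
  //
  // becomes:
  //
  //   <dl>
  //   <dt>Term</dt>
  //   <dd>first definition</dd>
  //   <dd>second definition</dd>
  //   </dl>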

  // Process the contents of a single definition list, splitting it
  // into individual term and definition list items.
  Markdown.Extra.prototype.processDefListItems = function(listStr) {
    var self = this;

    var dt = new RegExp(
      ['(\\x02\\n?|\\n\\n+)'    , // leading line
       '('                      , // definition terms = $1
         '[ ]{0,3}'             , // leading whitespace
         '(?![:][ ]|[ ])'       , // negative lookahead for a definition
                                  //   mark (colon) or more whitespace
         '(?:\\S.*\\n)+?'       , // actual term (not whitespace)
       ')'                      ,
       '(?=\\n?[ ]{0,3}:[ ])'     // lookahead for following line feed
      ].join(''),                 //   with a definition mark
      'gm'
    );

    var dd = new RegExp(
      ['\\n(\\n+)?'             , // leading line = $1
       '('                      , // marker space = $2
         '[ ]{0,3}'             , // whitespace before colon
         '[:][ ]+'              , // definition mark (colon)
       ')'                      ,
       '([\\s\\S]+?)'           , // definition text = $3
       '(?=\\n*'                , // stop at next definition mark,
         '(?:'                  , //   next term or end of text
           '\\n[ ]{0,3}[:][ ]|' ,
           '<dt>|\\x03'         , // \z
         ')'                    ,
       ')'
      ].join(''),
      'gm'
    );

    listStr = addAnchors(listStr);
    // trim trailing blank lines:
    listStr = listStr.replace(/\n{2,}(?=\x03)/, "\n");

    // Process definition terms.
    listStr = listStr.replace(dt, function(match, pre, termsStr) {
      var terms = trim(termsStr).split("\n");
      var text = '';
      for (var i = 0; i < terms.length; i++) {
        var term = terms[i];
        // process spans inside dt
        term = convertSpans(trim(term), self);
        text += "\n<dt>" + term + "</dt>";
      }
      return text + "\n";
    });

    // Process actual definitions.
    listStr = listStr.replace(dd, function(match, leadingLine, markerSpace, def) {
      if (leadingLine || def.match(/\n{2,}/)) {
        // replace marker with the appropriate whitespace indentation
        def = Array(markerSpace.length + 1).join(' ') + def;
        // process markdown inside definition
        // TODO?: currently doesn't apply extensions
        def = outdent(def) + "\n\n";
        def = "\n" + convertAll(def, self) + "\n";
      } else {
        // convert span-level markdown inside definition
        def = rtrim(def);
        def = convertSpans(outdent(def), self);
      }

      return "\n<dd>" + def + "</dd>\n";
    });

    return removeAnchors(listStr);
  };
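  // Note on the dd branch above: a definition set off by a blank line (or one
  // that itself contains blank lines) goes through the full block converter
  // via convertAll, so its content is wrapped in block-level tags such as
  // <p>; a tight definition only gets span-level conversion via convertSpans.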


  /***********************************************************
   * Strikethrough                                            *
   ************************************************************/

  Markdown.Extra.prototype.strikethrough = function(text) {
    // Pretty much duplicated from _DoItalicsAndBold
    return text.replace(/([\W_]|^)~T~T(?=\S)([^\r]*?\S[\*_]*)~T~T([\W_]|$)/g,
      "$1<del>$2</del>$3");
  };
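  // pagedown's converter escapes literal "~" to "~T" during preprocessing,
  // so each "~T~T" above corresponds to "~~" in the author's markdown.
  // Roughly (assuming that escaping has already been applied):
  //   ~~struck~~  ->  <del>struck</del>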


  /***********************************************************
   * New lines                                                *
   ************************************************************/

  Markdown.Extra.prototype.newlines = function(text) {
    // We have to ignore already converted newlines and line breaks in sub-list items
    return text.replace(/(<(?:br|\/li)>)?\n/g, function(wholeMatch, previousTag) {
      return previousTag ? wholeMatch : " <br>\n";
    });
  };
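  // Roughly: "line one\nline two" becomes "line one <br>\nline two", while
  // newlines already preceded by <br> or </li> are left untouched.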
})();