Compare commits
2 Commits: master ... wip-fronte

Author | SHA1 | Date
-------|------|-----
       | ec8f5032e9 |
       | 7a5af9282c |
.gitignore (vendored): 6 changes

@@ -12,12 +12,10 @@ config_local.py
 
 /build
 /.cache
-/.pytest_cache/
-*.egg-info/
+/*.egg-info/
 profile.stats
 /dump/
 /.eggs
-/devdeps/pip-wheel-metadata/
 
 /node_modules
 /.sass-cache
@@ -28,8 +26,6 @@ profile.stats
 
 pillar/web/static/assets/css/*.css
 pillar/web/static/assets/js/*.min.js
-pillar/web/static/assets/js/vendor/video.min.js
 pillar/web/static/storage/
 pillar/web/static/uploads/
 pillar/web/templates/
-/poetry.lock
README.md: 15 changes

@@ -3,7 +3,7 @@ Pillar
 
 This is the latest iteration on the Attract project. We are building a unified
 framework called Pillar. Pillar will combine Blender Cloud and Attract. You
-can see Pillar in action on the [Blender Cloud](https://cloud.blender.org).
+can see Pillar in action on the [Blender Cloud](https://cloud.bender.org).
 
 ## Custom fonts
 
@@ -25,16 +25,15 @@ Don't forget to Gulp!
 
 ## Installation
 
-Dependencies are managed via [Poetry](https://poetry.eustace.io/).
-
 Make sure your /data directory exists and is writable by the current user.
 Alternatively, provide a `pillar/config_local.py` that changes the relevant
 settings.
 
 ```
 git clone git@git.blender.org:pillar-python-sdk.git ../pillar-python-sdk
-pip install -U --user poetry
-poetry install
+pip install -e ../pillar-python-sdk
+pip install -U -r requirements.txt
+pip install -e .
 ```
 
 ## HDRi viewer
@@ -66,12 +65,6 @@ You can run the Celery Worker using `manage.py celery worker`.
 
 Find other Celery operations with the `manage.py celery` command.
 
-## Elasticsearch
-
-Pillar uses [Elasticsearch](https://www.elastic.co/products/elasticsearch) to power the search engine.
-You will need to run the `manage.py elastic reset_index` command to initialize the indexing.
-If you need to reindex your documents in elastic you run the `manage.py elastic reindex` command.
-
 ## Translations
 
 If the language you want to support doesn't exist, you need to run: `translations init es_AR`.
--- next file (name not captured): a deleted pyproject for "pillar-devdeps" ---

@@ -1,16 +0,0 @@
-[tool.poetry]
-name = "pillar-devdeps"
-version = "1.0"
-description = ""
-authors = [
-    "Francesco Siddi <francesco@blender.org>",
-    "Pablo Vazquez <pablo@blender.studio>",
-    "Sybren Stüvel <sybren@blender.studio>",
-]
-
-[tool.poetry.dependencies]
-python = "~3.6"
-mypy = "^0.501"
-pytest = "~4.4"
-pytest-cov = "~2.7"
-responses = "~0.10"
gulpfile.js: 198 changes

@@ -1,51 +1,37 @@
-let argv = require('minimist')(process.argv.slice(2));
-let autoprefixer = require('gulp-autoprefixer');
-let cache = require('gulp-cached');
-let chmod = require('gulp-chmod');
-let concat = require('gulp-concat');
-let git = require('gulp-git');
-let gulpif = require('gulp-if');
-let gulp = require('gulp');
-let livereload = require('gulp-livereload');
-let plumber = require('gulp-plumber');
-let pug = require('gulp-pug');
-let rename = require('gulp-rename');
-let sass = require('gulp-sass');
-let sourcemaps = require('gulp-sourcemaps');
-let uglify = require('gulp-uglify-es').default;
-let browserify = require('browserify');
-let babelify = require('babelify');
-let sourceStream = require('vinyl-source-stream');
-let glob = require('glob');
-let es = require('event-stream');
-let path = require('path');
-let buffer = require('vinyl-buffer');
+var argv = require('minimist')(process.argv.slice(2));
+var autoprefixer = require('gulp-autoprefixer');
+var cache = require('gulp-cached');
+var chmod = require('gulp-chmod');
+var concat = require('gulp-concat');
+var git = require('gulp-git');
+var gulpif = require('gulp-if');
+var gulp = require('gulp');
+var livereload = require('gulp-livereload');
+var plumber = require('gulp-plumber');
+var pug = require('gulp-pug');
+var rename = require('gulp-rename');
+var sass = require('gulp-sass');
+var sourcemaps = require('gulp-sourcemaps');
+var uglify = require('gulp-uglify');
 
-let enabled = {
+var enabled = {
     uglify: argv.production,
-    maps: !argv.production,
+    maps: argv.production,
     failCheck: !argv.production,
     prettyPug: !argv.production,
     cachify: !argv.production,
     cleanup: argv.production,
-    chmod: argv.production,
 };
 
-let destination = {
+var destination = {
     css: 'pillar/web/static/assets/css',
     pug: 'pillar/web/templates',
     js: 'pillar/web/static/assets/js',
 }
 
-let source = {
-    bootstrap: 'node_modules/bootstrap/',
-    jquery: 'node_modules/jquery/',
-    popper: 'node_modules/popper.js/',
-    vue: 'node_modules/vue/',
-}
 
-/* Stylesheets */
-gulp.task('styles', function(done) {
+/* CSS */
+gulp.task('styles', function() {
     gulp.src('src/styles/**/*.sass')
         .pipe(gulpif(enabled.failCheck, plumber()))
         .pipe(gulpif(enabled.maps, sourcemaps.init()))
@@ -56,12 +42,11 @@ gulp.task('styles', function(done) {
         .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
         .pipe(gulp.dest(destination.css))
         .pipe(gulpif(argv.livereload, livereload()));
-    done();
 });
 
 
-/* Templates */
-gulp.task('templates', function(done) {
+/* Templates - Pug */
+gulp.task('templates', function() {
     gulp.src('src/templates/**/*.pug')
         .pipe(gulpif(enabled.failCheck, plumber()))
         .pipe(gulpif(enabled.cachify, cache('templating')))
@@ -70,12 +55,11 @@ gulp.task('templates', function(done) {
         }))
         .pipe(gulp.dest(destination.pug))
         .pipe(gulpif(argv.livereload, livereload()));
-    done();
 });
 
 
 /* Individual Uglified Scripts */
-gulp.task('scripts', function(done) {
+gulp.task('scripts', function() {
     gulp.src('src/scripts/*.js')
         .pipe(gulpif(enabled.failCheck, plumber()))
         .pipe(gulpif(enabled.cachify, cache('scripting')))
@@ -83,131 +67,56 @@ gulp.task('scripts', function(done) {
         .pipe(gulpif(enabled.uglify, uglify()))
         .pipe(rename({suffix: '.min'}))
         .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
-        .pipe(gulpif(enabled.chmod, chmod(0o644)))
+        .pipe(chmod(644))
        .pipe(gulp.dest(destination.js))
         .pipe(gulpif(argv.livereload, livereload()));
-    done();
-});
-
-function browserify_base(entry) {
-    let pathSplited = path.dirname(entry).split(path.sep);
-    let moduleName = pathSplited[pathSplited.length - 1];
-    return browserify({
-        entries: [entry],
-        standalone: 'pillar.' + moduleName,
-    })
-    .transform(babelify, { "presets": ["@babel/preset-env"] })
-    .bundle()
-    .pipe(gulpif(enabled.failCheck, plumber()))
-    .pipe(sourceStream(path.basename(entry)))
-    .pipe(buffer())
-    .pipe(rename({
-        basename: moduleName,
-        extname: '.min.js'
-    }));
-}
-
-/**
- * Transcompile and package common modules to be included in tutti.js.
- *
- * Example:
- * src/scripts/js/es6/common/api/init.js
- * src/scripts/js/es6/common/events/init.js
- * Everything exported in api/init.js will end up in module pillar.api.*, and everything exported in events/init.js
- * will end up in pillar.events.*
- */
-function browserify_common() {
-    return glob.sync('src/scripts/js/es6/common/**/init.js').map(browserify_base);
-}
-
-/**
- * Transcompile and package individual modules.
- *
- * Example:
- * src/scripts/js/es6/individual/coolstuff/init.js
- * Will create a coolstuff.js and everything exported in init.js will end up in namespace pillar.coolstuff.*
- */
-gulp.task('scripts_browserify', function(done) {
-    glob('src/scripts/js/es6/individual/**/init.js', function(err, files) {
-        if(err) done(err);
-
-        var tasks = files.map(function(entry) {
-            return browserify_base(entry)
-                .pipe(gulpif(enabled.maps, sourcemaps.init()))
-                .pipe(gulpif(enabled.uglify, uglify()))
-                .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
-                .pipe(gulp.dest(destination.js));
-        });
-
-        es.merge(tasks).on('end', done);
-    })
 });
 
 
-/* Collection of scripts in src/scripts/tutti/ and src/scripts/js/es6/common/ to merge into tutti.min.js
- * Since it's always loaded, it's only for functions that we want site-wide.
- * It also includes jQuery and Bootstrap (and its dependency popper), since
- * the site doesn't work without it anyway.*/
-gulp.task('scripts_concat_tutti', function(done) {
-
-    let toUglify = [
-        source.jquery + 'dist/jquery.min.js',
-        source.vue + (enabled.uglify ? 'dist/vue.min.js' : 'dist/vue.js'),
-        source.popper + 'dist/umd/popper.min.js',
-        source.bootstrap + 'js/dist/index.js',
-        source.bootstrap + 'js/dist/util.js',
-        source.bootstrap + 'js/dist/alert.js',
-        source.bootstrap + 'js/dist/collapse.js',
-        source.bootstrap + 'js/dist/dropdown.js',
-        source.bootstrap + 'js/dist/tooltip.js',
-        'src/scripts/tutti/**/*.js'
-    ];
-
-    es.merge(gulp.src(toUglify), ...browserify_common())
+/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js */
+/* Since it's always loaded, it's only for functions that we want site-wide */
+gulp.task('scripts_concat_tutti', function() {
+    gulp.src('src/scripts/tutti/**/*.js')
         .pipe(gulpif(enabled.failCheck, plumber()))
         .pipe(gulpif(enabled.maps, sourcemaps.init()))
         .pipe(concat("tutti.min.js"))
         .pipe(gulpif(enabled.uglify, uglify()))
         .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
-        .pipe(gulpif(enabled.chmod, chmod(0o644)))
+        .pipe(chmod(644))
         .pipe(gulp.dest(destination.js))
         .pipe(gulpif(argv.livereload, livereload()));
-    done();
 });
-
-
-/* Simply move these vendor scripts from node_modules. */
-gulp.task('scripts_move_vendor', function(done) {
-
-    let toMove = [
-        'node_modules/video.js/dist/video.min.js',
-    ];
-
-    gulp.src(toMove)
-        .pipe(gulp.dest(destination.js + '/vendor/'));
-    done();
+gulp.task('scripts_concat_markdown', function() {
+    gulp.src('src/scripts/markdown/**/*.js')
+        .pipe(gulpif(enabled.failCheck, plumber()))
+        .pipe(gulpif(enabled.maps, sourcemaps.init()))
+        .pipe(concat("markdown.min.js"))
+        .pipe(gulpif(enabled.uglify, uglify()))
+        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
+        .pipe(chmod(644))
+        .pipe(gulp.dest(destination.js))
+        .pipe(gulpif(argv.livereload, livereload()));
 });
 
 
 // While developing, run 'gulp watch'
-gulp.task('watch',function(done) {
+gulp.task('watch',function() {
     // Only listen for live reloads if ran with --livereload
     if (argv.livereload){
         livereload.listen();
     }
 
-    gulp.watch('src/styles/**/*.sass',gulp.series('styles'));
-    gulp.watch('src/templates/**/*.pug',gulp.series('templates'));
-    gulp.watch('src/scripts/*.js',gulp.series('scripts'));
-    gulp.watch('src/scripts/tutti/**/*.js',gulp.series('scripts_concat_tutti'));
-    gulp.watch('src/scripts/js/**/*.js',gulp.series(['scripts_browserify', 'scripts_concat_tutti']));
-    done();
+    gulp.watch('src/styles/**/*.sass',['styles']);
+    gulp.watch('src/templates/**/*.pug',['templates']);
+    gulp.watch('src/scripts/*.js',['scripts']);
+    gulp.watch('src/scripts/tutti/**/*.js',['scripts_concat_tutti']);
+    gulp.watch('src/scripts/markdown/**/*.js',['scripts_concat_markdown']);
 });
 
 
 // Erases all generated files in output directories.
-gulp.task('cleanup', function(done) {
-    let paths = [];
+gulp.task('cleanup', function() {
+    var paths = [];
     for (attr in destination) {
         paths.push(destination[attr]);
     }
@@ -215,20 +124,17 @@ gulp.task('cleanup', function(done) {
     git.clean({ args: '-f -X ' + paths.join(' ') }, function (err) {
         if(err) throw err;
     });
-    done();
 });
 
 
 // Run 'gulp' to build everything at once
-let tasks = [];
+var tasks = [];
 if (enabled.cleanup) tasks.push('cleanup');
 
-// gulp.task('default', gulp.parallel('styles', 'templates', 'scripts', 'scripts_tutti'));
-gulp.task('default', gulp.parallel(tasks.concat([
+gulp.task('default', tasks.concat([
     'styles',
     'templates',
     'scripts',
     'scripts_concat_tutti',
-    'scripts_move_vendor',
-    'scripts_browserify',
-])));
+    'scripts_concat_markdown',
+]));
jest.config.js: 180 changes

@@ -1,180 +0,0 @@
-// For a detailed explanation regarding each configuration property, visit:
-// https://jestjs.io/docs/en/configuration.html
-
-module.exports = {
-  // All imported modules in your tests should be mocked automatically
-  // automock: false,
-
-  // Stop running tests after the first failure
-  // bail: false,
-
-  // Respect "browser" field in package.json when resolving modules
-  // browser: false,
-
-  // The directory where Jest should store its cached dependency information
-  // cacheDirectory: "/tmp/jest_rs",
-
-  // Automatically clear mock calls and instances between every test
-  clearMocks: true,
-
-  // Indicates whether the coverage information should be collected while executing the test
-  // collectCoverage: false,
-
-  // An array of glob patterns indicating a set of files for which coverage information should be collected
-  // collectCoverageFrom: null,
-
-  // The directory where Jest should output its coverage files
-  // coverageDirectory: null,
-
-  // An array of regexp pattern strings used to skip coverage collection
-  // coveragePathIgnorePatterns: [
-  //   "/node_modules/"
-  // ],
-
-  // A list of reporter names that Jest uses when writing coverage reports
-  // coverageReporters: [
-  //   "json",
-  //   "text",
-  //   "lcov",
-  //   "clover"
-  // ],
-
-  // An object that configures minimum threshold enforcement for coverage results
-  // coverageThreshold: null,
-
-  // Make calling deprecated APIs throw helpful error messages
-  // errorOnDeprecated: false,
-
-  // Force coverage collection from ignored files usin a array of glob patterns
-  // forceCoverageMatch: [],
-
-  // A path to a module which exports an async function that is triggered once before all test suites
-  // globalSetup: null,
-
-  // A path to a module which exports an async function that is triggered once after all test suites
-  // globalTeardown: null,
-
-  // A set of global variables that need to be available in all test environments
-  // globals: {},
-
-  // An array of directory names to be searched recursively up from the requiring module's location
-  // moduleDirectories: [
-  //   "node_modules"
-  // ],
-
-  // An array of file extensions your modules use
-  // moduleFileExtensions: [
-  //   "js",
-  //   "json",
-  //   "jsx",
-  //   "node"
-  // ],
-
-  // A map from regular expressions to module names that allow to stub out resources with a single module
-  // moduleNameMapper: {},
-
-  // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
-  // modulePathIgnorePatterns: [],
-
-  // Activates notifications for test results
-  // notify: false,
-
-  // An enum that specifies notification mode. Requires { notify: true }
-  // notifyMode: "always",
-
-  // A preset that is used as a base for Jest's configuration
-  // preset: null,
-
-  // Run tests from one or more projects
-  // projects: null,
-
-  // Use this configuration option to add custom reporters to Jest
-  // reporters: undefined,
-
-  // Automatically reset mock state between every test
-  // resetMocks: false,
-
-  // Reset the module registry before running each individual test
-  // resetModules: false,
-
-  // A path to a custom resolver
-  // resolver: null,
-
-  // Automatically restore mock state between every test
-  // restoreMocks: false,
-
-  // The root directory that Jest should scan for tests and modules within
-  // rootDir: null,
-
-  // A list of paths to directories that Jest should use to search for files in
-  // roots: [
-  //   "<rootDir>"
-  // ],
-
-  // Allows you to use a custom runner instead of Jest's default test runner
-  // runner: "jest-runner",
-
-  // The paths to modules that run some code to configure or set up the testing environment before each test
-  setupFiles: ["<rootDir>/src/scripts/js/es6/test_config/test-env.js"],
-
-  // The path to a module that runs some code to configure or set up the testing framework before each test
-  // setupTestFrameworkScriptFile: null,
-
-  // A list of paths to snapshot serializer modules Jest should use for snapshot testing
-  // snapshotSerializers: [],
-
-  // The test environment that will be used for testing
-  testEnvironment: "jsdom",
-
-  // Options that will be passed to the testEnvironment
-  // testEnvironmentOptions: {},
-
-  // Adds a location field to test results
-  // testLocationInResults: false,
-
-  // The glob patterns Jest uses to detect test files
-  // testMatch: [
-  //   "**/__tests__/**/*.js?(x)",
-  //   "**/?(*.)+(spec|test).js?(x)"
-  // ],
-
-  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
-  // testPathIgnorePatterns: [
-  //   "/node_modules/"
-  // ],
-
-  // The regexp pattern Jest uses to detect test files
-  // testRegex: "",
-
-  // This option allows the use of a custom results processor
-  // testResultsProcessor: null,
-
-  // This option allows use of a custom test runner
-  // testRunner: "jasmine2",
-
-  // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
-  // testURL: "http://localhost",
-
-  // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
-  // timers: "real",
-
-  // A map from regular expressions to paths to transformers
-  // transform: null,
-
-  // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
-  // transformIgnorePatterns: [
-  //   "/node_modules/"
-  // ],
-
-  // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
-  // unmockedModulePathPatterns: undefined,
-
-  // Indicates whether each individual test should be reported during the run
-  // verbose: null,
-
-  // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
-  // watchPathIgnorePatterns: [],
-
-  // Whether to use watchman for file crawling
-  // watchman: true,
-};
package-lock.json (generated): 9877 changes

(File diff suppressed because it is too large.)
package.json: 60 changes

@@ -4,51 +4,23 @@
     "author": "Blender Institute",
     "repository": {
         "type": "git",
-        "url": "git://git.blender.org/pillar.git"
+        "url": "https://github.com/armadillica/pillar.git"
     },
     "devDependencies": {
-        "@babel/core": "7.1.6",
-        "@babel/preset-env": "7.1.6",
-        "acorn": "5.7.3",
-        "babel-core": "7.0.0-bridge.0",
-        "babelify": "10.0.0",
-        "browserify": "16.2.3",
-        "gulp": "4.0.0",
-        "gulp-autoprefixer": "6.0.0",
-        "gulp-babel": "8.0.0",
-        "gulp-cached": "1.1.1",
-        "gulp-chmod": "2.0.0",
-        "gulp-concat": "2.6.1",
-        "gulp-git": "2.8.0",
-        "gulp-if": "2.0.2",
-        "gulp-livereload": "4.0.0",
-        "gulp-plumber": "1.2.0",
-        "gulp-pug": "4.0.1",
-        "gulp-rename": "1.4.0",
-        "gulp-sass": "4.1.0",
-        "gulp-sourcemaps": "2.6.4",
-        "gulp-uglify-es": "1.0.4",
-        "jest": "^24.8.0",
-        "minimist": "1.2.0",
-        "vinyl-buffer": "1.0.1",
-        "vinyl-source-stream": "2.0.0"
-    },
-    "dependencies": {
-        "bootstrap": "^4.3.1",
-        "glob": "7.1.3",
-        "jquery": "^3.4.1",
-        "natives": "^1.1.6",
-        "popper.js": "1.14.4",
-        "video.js": "7.2.2",
-        "vue": "2.5.17"
-    },
-    "scripts": {
-        "test": "jest"
-    },
-    "__COMMENTS__": [
-        "natives@1.1.6 for Gulp 3.x on Node 10.x: https://github.com/gulpjs/gulp/issues/2162#issuecomment-385197164"
-    ],
-    "resolutions": {
-        "natives": "1.1.6"
+        "gulp": "~3.9.1",
+        "gulp-autoprefixer": "~2.3.1",
+        "gulp-cached": "~1.1.0",
+        "gulp-chmod": "~1.3.0",
+        "gulp-concat": "~2.6.0",
+        "gulp-if": "^2.0.1",
+        "gulp-git": "~2.4.2",
+        "gulp-livereload": "~3.8.1",
+        "gulp-plumber": "~1.1.0",
+        "gulp-pug": "~3.2.0",
+        "gulp-rename": "~1.2.2",
+        "gulp-sass": "~2.3.1",
+        "gulp-sourcemaps": "~1.6.0",
+        "gulp-uglify": "~1.5.3",
+        "minimist": "^1.2.0"
     }
 }
--- next file (name not captured) ---

@@ -12,25 +12,10 @@ import typing
 import os
 import os.path
 import pathlib
-import warnings
-
-# These warnings have to be suppressed before the first import.
-
-# Eve is falling behind on Cerberus. See https://github.com/pyeve/eve/issues/1278
-warnings.filterwarnings(
-    'ignore', category=DeprecationWarning,
-    message="Methods for type testing are deprecated, use TypeDefinition and the "
-            "'types_mapping'-property of a Validator-instance instead")
-
-# Werkzeug deprecated Request.is_xhr, but it works fine with jQuery and we don't need a reminder
-# every time a unit test is run.
-warnings.filterwarnings('ignore', category=DeprecationWarning,
-                        message="'Request.is_xhr' is deprecated as of version 0.13 and will be "
-                                "removed in version 1.0.")
 
 import jinja2
-import flask
 from eve import Eve
+import flask
 from flask import g, render_template, request
 from flask_babel import Babel, gettext as _
 from flask.templating import TemplateNotFound
@@ -85,7 +70,7 @@ class BlinkerCompatibleEve(Eve):
 
 
 class PillarServer(BlinkerCompatibleEve):
-    def __init__(self, app_root: str, **kwargs) -> None:
+    def __init__(self, app_root, **kwargs):
         from .extension import PillarExtension
         from celery import Celery
         from flask_wtf.csrf import CSRFProtect
@@ -155,6 +140,8 @@ class PillarServer(BlinkerCompatibleEve):
 
         self.org_manager = pillar.api.organizations.OrgManager()
 
+        self.before_first_request(self.setup_db_indices)
+
         # Make CSRF protection available to the application. By default it is
         # disabled on all endpoints. More info at WTF_CSRF_CHECK_DEFAULT in config.py
         self.csrf = CSRFProtect(self)
@@ -293,7 +280,7 @@ class PillarServer(BlinkerCompatibleEve):
         self.encoding_service_client = Zencoder(self.config['ZENCODER_API_KEY'])
 
     def _config_caching(self):
-        from flask_caching import Cache
+        from flask_cache import Cache
         self.cache = Cache(self)
 
     def set_languages(self, translations_folder: pathlib.Path):
@@ -492,12 +479,10 @@ class PillarServer(BlinkerCompatibleEve):
 
         # Pillar-defined Celery task modules:
         celery_task_modules = [
-            'pillar.celery.avatar',
-            'pillar.celery.badges',
-            'pillar.celery.email_tasks',
-            'pillar.celery.file_link_tasks',
-            'pillar.celery.search_index_tasks',
             'pillar.celery.tasks',
+            'pillar.celery.search_index_tasks',
+            'pillar.celery.file_link_tasks',
+            'pillar.celery.email_tasks',
         ]
 
         # Allow Pillar extensions from defining their own Celery tasks.
@@ -663,7 +648,7 @@ class PillarServer(BlinkerCompatibleEve):
         return self.pillar_error_handler(error)
 
     def handle_sdk_resource_invalid(self, error):
-        self.log.exception('Forwarding ResourceInvalid exception to client: %s', error, exc_info=True)
+        self.log.info('Forwarding ResourceInvalid exception to client: %s', error, exc_info=True)
 
         # Raising a Werkzeug 422 exception doens't work, as Flask turns it into a 500.
         return _('The submitted data could not be validated.'), 422
@@ -719,8 +704,6 @@ class PillarServer(BlinkerCompatibleEve):
     def finish_startup(self):
         self.log.info('Using MongoDB database %r', self.config['MONGO_DBNAME'])
 
-        with self.app_context():
-            self.setup_db_indices()
         self._config_celery()
 
         api.setup_app(self)
@@ -728,10 +711,6 @@ class PillarServer(BlinkerCompatibleEve):
 
         authentication.setup_app(self)
 
-        # Register Flask Debug Toolbar (disabled by default).
-        from flask_debugtoolbar import DebugToolbarExtension
-        DebugToolbarExtension(self)
-
         for ext in self.pillar_extensions.values():
             self.log.info('Setting up extension %s', ext.name)
             ext.setup_app(self)
@@ -742,7 +721,6 @@ class PillarServer(BlinkerCompatibleEve):
         self._config_user_caps()
 
         # Only enable this when debugging.
-        # TODO(fsiddi): Consider removing this in favor of the routes tab in Flask Debug Toolbar.
         # self._list_routes()
 
     def setup_db_indices(self):
@@ -782,8 +760,6 @@ class PillarServer(BlinkerCompatibleEve):
         coll.create_index([('properties.status', pymongo.ASCENDING),
                            ('node_type', pymongo.ASCENDING),
                            ('_created', pymongo.DESCENDING)])
-        # Used for asset tags
-        coll.create_index([('properties.tags', pymongo.ASCENDING)])
 
         coll = db['projects']
         # This index is used for statistics, and for fetching public projects.
@@ -806,18 +782,17 @@ class PillarServer(BlinkerCompatibleEve):
         return 'basic ' + base64.b64encode('%s:%s' % (username, subclient_id))
 
     def post_internal(self, resource: str, payl=None, skip_validation=False):
-        """Workaround for Eve issue https://github.com/pyeve/eve/issues/810"""
+        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
         from eve.methods.post import post_internal
-
         url = self.config['URLS'][resource]
         path = '%s/%s' % (self.api_prefix, url)
 
         with self.__fake_request_url_rule('POST', path):
             return post_internal(resource, payl=payl, skip_validation=skip_validation)[:4]
 
     def put_internal(self, resource: str, payload=None, concurrency_check=False,
                      skip_validation=False, **lookup):
-        """Workaround for Eve issue https://github.com/pyeve/eve/issues/810"""
+        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
         from eve.methods.put import put_internal
 
         url = self.config['URLS'][resource]
@@ -828,7 +803,7 @@ class PillarServer(BlinkerCompatibleEve):
 
     def patch_internal(self, resource: str, payload=None, concurrency_check=False,
                        skip_validation=False, **lookup):
-        """Workaround for Eve issue https://github.com/pyeve/eve/issues/810"""
+        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
         from eve.methods.patch import patch_internal
 
         url = self.config['URLS'][resource]
@@ -839,7 +814,7 @@ class PillarServer(BlinkerCompatibleEve):
 
     def delete_internal(self, resource: str, concurrency_check=False,
                         suppress_callbacks=False, **lookup):
-        """Workaround for Eve issue https://github.com/pyeve/eve/issues/810"""
+        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.delete import deleteitem_internal
 
         url = self.config['URLS'][resource]
@@ -920,8 +895,7 @@ class PillarServer(BlinkerCompatibleEve):
 
             yield ctx
 
-    def validator_for_resource(self,
-                               resource_name: str) -> custom_field_validation.ValidateCustomFields:
+    def validator_for_resource(self, resource_name: str) -> custom_field_validation.ValidateCustomFields:
         schema = self.config['DOMAIN'][resource_name]['schema']
         validator = self.validator(schema, resource_name)
         return validator
--- next file (name not captured) ---

@@ -1,6 +1,6 @@
 def setup_app(app):
     from . import encoding, blender_id, projects, local_auth, file_storage
-    from . import users, nodes, latest, blender_cloud, service, activities, timeline
+    from . import users, nodes, latest, blender_cloud, service, activities
     from . import organizations
     from . import search
 
@@ -11,7 +11,6 @@ def setup_app(app):
     local_auth.setup_app(app, url_prefix='/auth')
     file_storage.setup_app(app, url_prefix='/storage')
     latest.setup_app(app, url_prefix='/latest')
-    timeline.setup_app(app, url_prefix='/timeline')
     blender_cloud.setup_app(app, url_prefix='/bcloud')
     users.setup_app(app, api_prefix='/users')
     service.setup_app(app, api_prefix='/service')
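
Note: the removed `timeline.setup_app()` call follows the same pattern as its neighbours; each API submodule wraps its routes in a Flask blueprint that `setup_app()` mounts under a URL prefix. A minimal sketch of that pattern, with illustrative blueprint and route names (not taken from the diff):

```
import flask

timeline = flask.Blueprint('timeline', __name__)

@timeline.route('/')
def index():
    # Placeholder endpoint; the real module defines its own routes.
    return flask.jsonify({'status': 'ok'})

def setup_app(app: flask.Flask, url_prefix: str = None):
    # Everything in the blueprint becomes reachable below url_prefix,
    # e.g. GET /timeline/ for the route above.
    app.register_blueprint(timeline, url_prefix=url_prefix)
```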
--- next file (name not captured) ---

@@ -1,7 +1,7 @@
 import logging
 
 from flask import request, current_app
-import pillar.api.users.avatar
+from pillar.api.utils import gravatar
 from pillar.auth import current_user
 
 log = logging.getLogger(__name__)
@@ -68,7 +68,7 @@ def notification_parse(notification):
     if actor:
         parsed_actor = {
             'username': actor['username'],
-            'avatar': pillar.api.users.avatar.url(actor)}
+            'avatar': gravatar(actor['email'])}
     else:
         parsed_actor = None
 
@@ -91,14 +91,14 @@ def notification_parse(notification):
 
 
 def notification_get_subscriptions(context_object_type, context_object_id, actor_user_id):
-    subscriptions_collection = current_app.db('activities-subscriptions')
+    subscriptions_collection = current_app.data.driver.db['activities-subscriptions']
     lookup = {
         'user': {"$ne": actor_user_id},
         'context_object_type': context_object_type,
         'context_object': context_object_id,
         'is_subscribed': True,
     }
-    return subscriptions_collection.find(lookup), subscriptions_collection.count_documents(lookup)
+    return subscriptions_collection.find(lookup)
 
 
 def activity_subscribe(user_id, context_object_type, context_object_id):
@@ -119,8 +119,6 @@ def activity_subscribe(user_id, context_object_type, context_object_id):
 
     # If no subscription exists, we create one
     if not subscription:
-        # Workaround for issue: https://github.com/pyeve/eve/issues/1174
-        lookup['notifications'] = {}
         current_app.post_internal('activities-subscriptions', lookup)
 
 
@@ -140,10 +138,10 @@ def activity_object_add(actor_user_id, verb, object_type, object_id,
     :param object_id: object id, to be traced with object_type_id
     """
 
-    subscriptions, subscription_count = notification_get_subscriptions(
+    subscriptions = notification_get_subscriptions(
         context_object_type, context_object_id, actor_user_id)
 
-    if subscription_count == 0:
+    if subscriptions.count() == 0:
         return
 
     info, status = register_activity(actor_user_id, verb, object_type, object_id,
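
Note: the master side of this hunk returns the cursor together with a separate `count_documents()` result because `Cursor.count()` (still used on the wip side) was deprecated in PyMongo 3.7; newer code asks the collection for the count instead of the cursor. A minimal sketch, with an illustrative database name and filter:

```
from pymongo import MongoClient

coll = MongoClient().pillar['activities-subscriptions']
lookup = {'is_subscribed': True}

subscriptions = coll.find(lookup)                  # cursor over matching docs
subscription_count = coll.count_documents(lookup)  # counted on the collection,
                                                   # not on the cursor
if subscription_count == 0:
    print('nobody is subscribed')
```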
--- next file (name not captured) ---

@@ -257,10 +257,10 @@ def has_home_project(user_id):
     """Returns True iff the user has a home project."""
 
     proj_coll = current_app.data.driver.db['projects']
-    return proj_coll.count_documents({'user': user_id, 'category': 'home', '_deleted': False}) > 0
+    return proj_coll.count({'user': user_id, 'category': 'home', '_deleted': False}) > 0
 
 
-def get_home_project(user_id: ObjectId, projection=None) -> dict:
+def get_home_project(user_id, projection=None):
     """Returns the home project"""
 
     proj_coll = current_app.data.driver.db['projects']
@@ -272,7 +272,7 @@ def is_home_project(project_id, user_id):
     """Returns True iff the given project exists and is the user's home project."""
 
     proj_coll = current_app.data.driver.db['projects']
-    return proj_coll.count_documents({'_id': project_id,
+    return proj_coll.count({'_id': project_id,
                             'user': user_id,
                             'category': 'home',
                             '_deleted': False}) > 0
--- next file (name not captured) ---

@@ -104,7 +104,7 @@ def has_texture_node(proj, return_hdri=True):
     if return_hdri:
         node_types.append('group_hdri')
 
-    count = nodes_collection.count_documents(
+    count = nodes_collection.count(
         {'node_type': {'$in': node_types},
          'project': proj['_id'],
          'parent': None})
--- next file (name not captured) ---

@@ -6,17 +6,14 @@ with Blender ID.
 
 import datetime
 import logging
-from urllib.parse import urljoin
 
 import requests
 from bson import tz_util
 from rauth import OAuth2Session
 from flask import Blueprint, request, jsonify, session
 from requests.adapters import HTTPAdapter
-import urllib3.util.retry
 
 from pillar import current_app
-from pillar.auth import get_blender_id_oauth_token
 from pillar.api.utils import authentication, utcnow
 from pillar.api.utils.authentication import find_user_in_db, upsert_user
 
@@ -31,30 +28,6 @@ class LogoutUser(Exception):
     """
 
 
-class Session(requests.Session):
-    """Requests Session suitable for Blender ID communication."""
-
-    def __init__(self):
-        super().__init__()
-
-        retries = urllib3.util.retry.Retry(
-            total=10,
-            backoff_factor=0.05,
-        )
-        http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
-
-        self.mount('https://', http_adapter)
-        self.mount('http://', http_adapter)
-
-    def authenticate(self):
-        """Attach the current user's authentication token to the request."""
-        bid_token = get_blender_id_oauth_token()
-        if not bid_token:
-            raise TypeError('authenticate() requires current user to be logged in with Blender ID')
-
-        self.headers['Authorization'] = f'Bearer {bid_token}'
-
-
 @blender_id.route('/store_scst', methods=['POST'])
 def store_subclient_token():
     """Verifies & stores a user's subclient-specific token."""
@@ -141,12 +114,15 @@ def validate_token(user_id, token, oauth_subclient_id):
     # We only want to accept Blender Cloud tokens.
     payload['client_id'] = current_app.config['OAUTH_CREDENTIALS']['blender-id']['id']
 
-    blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
-    url = urljoin(blender_id_endpoint, 'u/validate_token')
+    url = '{0}/u/validate_token'.format(current_app.config['BLENDER_ID_ENDPOINT'])
     log.debug('POSTing to %r', url)
 
+    # Retry a few times when POSTing to BlenderID fails.
+    # Source: http://stackoverflow.com/a/15431343/875379
+    s = requests.Session()
+    s.mount(current_app.config['BLENDER_ID_ENDPOINT'], HTTPAdapter(max_retries=5))
+
     # POST to Blender ID, handling errors as negative verification results.
-    s = Session()
     try:
         r = s.post(url, data=payload, timeout=5,
                    verify=current_app.config['TLS_CERT_FILE'])
@@ -242,7 +218,7 @@ def fetch_blenderid_user() -> dict:
 
     my_log = log.getChild('fetch_blenderid_user')
 
-    bid_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'], 'api/user')
+    bid_url = '%s/api/user' % current_app.config['BLENDER_ID_ENDPOINT']
     my_log.debug('Fetching user info from %s', bid_url)
 
     credentials = current_app.config['OAUTH_CREDENTIALS']['blender-id']
@@ -280,16 +256,6 @@ def fetch_blenderid_user() -> dict:
     return payload
 
 
-def avatar_url(blenderid_user_id: str) -> str:
-    """Return the URL to the user's avatar on Blender ID.
-
-    This avatar should be downloaded, and not served from the Blender ID URL.
-    """
-    bid_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'],
-                      f'api/user/{blenderid_user_id}/avatar')
-    return bid_url
-
-
 def setup_app(app, url_prefix):
     app.register_api_blueprint(blender_id, url_prefix=url_prefix)
 
@@ -297,7 +263,7 @@ def setup_app(app, url_prefix):
 def switch_user_url(next_url: str) -> str:
     from urllib.parse import quote
 
-    base_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'], 'switch')
+    base_url = '%s/switch' % current_app.config['BLENDER_ID_ENDPOINT']
     if next_url:
         return '%s?next=%s' % (base_url, quote(next_url))
     return base_url
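
Note: both sides retry HTTP requests to Blender ID; the master side does it with a reusable `requests.Session` subclass configured with urllib3's `Retry`, the wip side by mounting an `HTTPAdapter` ad hoc per call. A minimal standalone sketch of the underlying mechanism, with an illustrative endpoint URL:

```
import requests
import urllib3.util.retry

retries = urllib3.util.retry.Retry(total=10, backoff_factor=0.05)
adapter = requests.adapters.HTTPAdapter(max_retries=retries)

session = requests.Session()
session.mount('https://', adapter)  # all HTTPS requests now retry transparently
session.mount('http://', adapter)

# e.g. session.post('https://id.example.com/u/validate_token', data={}, timeout=5)
```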
@ -1,17 +1,17 @@
|
|||||||
from datetime import datetime
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from bson import ObjectId, tz_util
|
from bson import ObjectId, tz_util
|
||||||
|
from datetime import datetime
|
||||||
|
import cerberus.errors
|
||||||
from eve.io.mongo import Validator
|
from eve.io.mongo import Validator
|
||||||
from flask import current_app
|
from flask import current_app
|
||||||
|
|
||||||
from pillar import markdown
|
import pillar.markdown
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ValidateCustomFields(Validator):
|
class ValidateCustomFields(Validator):
|
||||||
|
|
||||||
# TODO: split this into a convert_property(property, schema) and call that from this function.
|
# TODO: split this into a convert_property(property, schema) and call that from this function.
|
||||||
def convert_properties(self, properties, node_schema):
|
def convert_properties(self, properties, node_schema):
|
||||||
"""Converts datetime strings and ObjectId strings to actual Python objects."""
|
"""Converts datetime strings and ObjectId strings to actual Python objects."""
|
||||||
@ -29,11 +29,7 @@ class ValidateCustomFields(Validator):
|
|||||||
dict_valueschema = schema_prop['schema']
|
dict_valueschema = schema_prop['schema']
|
||||||
properties[prop] = self.convert_properties(properties[prop], dict_valueschema)
|
properties[prop] = self.convert_properties(properties[prop], dict_valueschema)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
# Cerberus 1.3 changed valueschema to valuesrules.
|
dict_valueschema = schema_prop['valueschema']
|
||||||
dict_valueschema = schema_prop.get('valuesrules') or \
|
|
||||||
schema_prop.get('valueschema')
|
|
||||||
if dict_valueschema is None:
|
|
||||||
raise KeyError(f"missing 'valuesrules' key in schema of property {prop}")
|
|
||||||
self.convert_dict_values(properties[prop], dict_valueschema)
|
self.convert_dict_values(properties[prop], dict_valueschema)
|
||||||
|
|
||||||
elif prop_type == 'list':
|
elif prop_type == 'list':
|
||||||
@ -77,11 +73,6 @@ class ValidateCustomFields(Validator):
|
|||||||
dict_property[key] = self.convert_properties(item_prop, item_schema)['item']
|
dict_property[key] = self.convert_properties(item_prop, item_schema)['item']
|
||||||
|
|
||||||
def _validate_valid_properties(self, valid_properties, field, value):
|
def _validate_valid_properties(self, valid_properties, field, value):
|
||||||
"""Fake property that triggers node dynamic property validation.
|
|
||||||
|
|
||||||
The rule's arguments are validated against this schema:
|
|
||||||
{'type': 'boolean'}
|
|
||||||
"""
|
|
||||||
from pillar.api.utils import project_get_node_type
|
from pillar.api.utils import project_get_node_type
|
||||||
|
|
||||||
projects_collection = current_app.data.driver.db['projects']
|
projects_collection = current_app.data.driver.db['projects']
|
||||||
@ -116,7 +107,7 @@ class ValidateCustomFields(Validator):
|
|||||||
if val:
|
if val:
|
||||||
# This ensures the modifications made by v's coercion rules are
|
# This ensures the modifications made by v's coercion rules are
|
||||||
# visible to this validator's output.
|
# visible to this validator's output.
|
||||||
self.document[field] = v.document
|
self.current[field] = v.current
|
||||||
return True
|
return True
|
||||||
|
|
||||||
log.warning('Error validating properties for node %s: %s', self.document, v.errors)
|
log.warning('Error validating properties for node %s: %s', self.document, v.errors)
|
||||||
@ -127,9 +118,6 @@ class ValidateCustomFields(Validator):
|
|||||||
|
|
||||||
Combine "required_after_creation=True" with "required=False" to allow
|
Combine "required_after_creation=True" with "required=False" to allow
|
||||||
pre-insert hooks to set default values.
|
pre-insert hooks to set default values.
|
||||||
|
|
||||||
The rule's arguments are validated against this schema:
|
|
||||||
{'type': 'boolean'}
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if not required_after_creation:
|
if not required_after_creation:
|
||||||
@ -137,14 +125,14 @@ class ValidateCustomFields(Validator):
|
|||||||
# validator at all.
|
# validator at all.
|
||||||
return
|
return
|
||||||
|
|
||||||
if self.document_id is None:
|
if self._id is None:
|
||||||
# This is a creation call, in which case this validator shouldn't run.
|
# This is a creation call, in which case this validator shouldn't run.
|
||||||
return
|
return
|
||||||
|
|
||||||
if not value:
|
if not value:
|
||||||
self._error(field, "Value is required once the document was created")
|
self._error(field, "Value is required once the document was created")
|
||||||
|
|
||||||
def _check_with_iprange(self, field_name: str, value: str):
|
def _validate_type_iprange(self, field_name: str, value: str):
|
||||||
"""Ensure the field contains a valid IP address.
|
"""Ensure the field contains a valid IP address.
|
||||||
|
|
||||||
Supports both IPv6 and IPv4 ranges. Requires the IPy module.
|
Supports both IPv6 and IPv4 ranges. Requires the IPy module.
|
||||||
@@ -161,19 +149,40 @@ class ValidateCustomFields(Validator):
         if ip.prefixlen() == 0:
             self._error(field_name, 'Zero-length prefix is not allowed')
 
-    def _normalize_coerce_markdown(self, markdown_field: str) -> str:
-        """
-        Cache markdown as html.
-
-        :param markdown_field: name of the field containing Markdown
-        :return: html string
-        """
-        my_log = log.getChild('_normalize_coerce_markdown')
-        mdown = self.document.get(markdown_field, '')
-        html = markdown.markdown(mdown)
-        my_log.debug('Generated html for markdown field %s in doc with id %s',
-                     markdown_field, id(self.document))
-        return html
+    def _validate_type_binary(self, field_name: str, value: bytes):
+        """Add support for binary type.
+
+        This type was actually introduced in Cerberus 1.0, so we can drop
+        support for this once Eve starts using that version (or newer).
+        """
+
+        if not isinstance(value, (bytes, bytearray)):
+            self._error(field_name, f'wrong value type {type(value)}, expected bytes or bytearray')
+
+    def _validate_coerce(self, coerce, field: str, value):
+        """Override Cerberus' _validate_coerce method for richer features.
+
+        This now supports named coercion functions (available in Cerberus 1.0+)
+        and passes the field name to coercion functions as well.
+        """
+        if isinstance(coerce, str):
+            coerce = getattr(self, f'_normalize_coerce_{coerce}')
+
+        try:
+            return coerce(field, value)
+        except (TypeError, ValueError):
+            self._error(field, cerberus.errors.ERROR_COERCION_FAILED.format(field))
+
+    def _normalize_coerce_markdown(self, field: str, value):
+        """Render Markdown from this field into {field}_html.
+
+        The field name MUST NOT end in `_html`. The Markdown is read from this
+        field and the rendered HTML is written to the field `{field}_html`.
+        """
+        html = pillar.markdown.markdown(value)
+        field_name = pillar.markdown.cache_field_name(field)
+        self.current[field_name] = html
+        return value
 
 
 if __name__ == '__main__':
@@ -181,12 +190,12 @@ if __name__ == '__main__':
 
     v = ValidateCustomFields()
     v.schema = {
-        'foo': {'type': 'string', 'check_with': 'markdown'},
+        'foo': {'type': 'string', 'coerce': 'markdown'},
         'foo_html': {'type': 'string'},
         'nested': {
             'type': 'dict',
             'schema': {
-                'bar': {'type': 'string', 'check_with': 'markdown'},
+                'bar': {'type': 'string', 'coerce': 'markdown'},
                 'bar_html': {'type': 'string'},
             }
         }
@@ -1,8 +1,5 @@
 import os
 
-from pillar.api.node_types.utils import markdown_fields
-
-STORAGE_BACKENDS = ["local", "pillar", "cdnsun", "gcs", "unittest"]
 URL_PREFIX = 'api'
 
 # Enable reads (GET), inserts (POST) and DELETE for resources/collections
@@ -124,62 +121,12 @@ users_schema = {
     'service': {
         'type': 'dict',
         'allow_unknown': True,
-    },
-    'avatar': {
-        'type': 'dict',
         'schema': {
-            'file': {
-                'type': 'objectid',
-                'data_relation': {
-                    'resource': 'files',
-                    'field': '_id',
-                },
-            },
-            # For only downloading when things really changed:
-            'last_downloaded_url': {
-                'type': 'string',
-            },
-            'last_modified': {
-                'type': 'string',
-            },
-        },
-    },
-
-    # Node-specific information for this user.
-    'nodes': {
-        'type': 'dict',
-        'schema': {
-            # Per watched video info about where the user left off, both in time and in percent.
-            'view_progress': {
-                'type': 'dict',
-                # Keyed by Node ID of the video asset. MongoDB doesn't support using
-                # ObjectIds as key, so we cast them to string instead.
-                'keysrules': {'type': 'string'},
-                'valuesrules': {
-                    'type': 'dict',
-                    'schema': {
-                        'progress_in_sec': {'type': 'float', 'min': 0},
-                        'progress_in_percent': {'type': 'integer', 'min': 0, 'max': 100},
-
-                        # When the progress was last updated, so we can limit this history to
-                        # the last-watched N videos if we want, or show stuff in chrono order.
-                        'last_watched': {'type': 'datetime'},
-
-                        # True means progress_in_percent = 100, for easy querying
-                        'done': {'type': 'boolean', 'default': False},
-                    },
-                },
-            },
-
-        },
-    },
-
-    'badges': {
-        'type': 'dict',
-        'schema': {
-            'html': {'type': 'string'},  # HTML fetched from Blender ID.
-            'expires': {'type': 'datetime'},  # When we should fetch it again.
-        },
+            'badger': {
+                'type': 'list',
+                'schema': {'type': 'string'}
+            }
+        }
     },
 
     # Properties defined by extensions. Extensions should use their name (see the
@@ -205,7 +152,12 @@ organizations_schema = {
         'maxlength': 128,
         'required': True
     },
-    **markdown_fields('description', maxlength=256),
+    'description': {
+        'type': 'string',
+        'maxlength': 256,
+        'coerce': 'markdown',
+    },
+    '_description_html': {'type': 'string'},
     'website': {
         'type': 'string',
         'maxlength': 256,
@@ -275,7 +227,7 @@ organizations_schema = {
                 'start': {'type': 'binary', 'required': True},
                 'end': {'type': 'binary', 'required': True},
                 'prefix': {'type': 'integer', 'required': True},
-                'human': {'type': 'string', 'required': True, 'check_with': 'iprange'},
+                'human': {'type': 'iprange', 'required': True},
             }
         },
     },
@@ -338,7 +290,11 @@ nodes_schema = {
         'maxlength': 128,
         'required': True,
     },
-    **markdown_fields('description'),
+    'description': {
+        'type': 'string',
+        'coerce': 'markdown',
+    },
+    '_description_html': {'type': 'string'},
     'picture': _file_embedded_schema,
     'order': {
         'type': 'integer',
@@ -371,7 +327,7 @@ nodes_schema = {
     'properties': {
         'type': 'dict',
         'valid_properties': True,
-        'required': True
+        'required': True,
     },
     'permissions': {
         'type': 'dict',
@@ -389,11 +345,11 @@ tokens_schema = {
     },
     'token': {
         'type': 'string',
-        'required': True,
+        'required': False,
     },
     'token_hashed': {
         'type': 'string',
-        'required': False,
+        'required': True,
     },
     'expire_time': {
         'type': 'datetime',
@@ -412,13 +368,6 @@ tokens_schema = {
             'type': 'string',
         },
     },
-
-    # OAuth scopes granted to this token.
-    'oauth_scopes': {
-        'type': 'list',
-        'default': [],
-        'schema': {'type': 'string'},
-    }
 }
 
 files_schema = {
@@ -476,7 +425,7 @@ files_schema = {
     'backend': {
         'type': 'string',
         'required': True,
-        'allowed': STORAGE_BACKENDS,
+        'allowed': ["local", "pillar", "cdnsun", "gcs", "unittest"]
     },
 
     # Where the file is in the backend storage itself. In the case of GCS,
@@ -588,7 +537,11 @@ projects_schema = {
         'maxlength': 128,
         'required': True,
     },
-    **markdown_fields('description'),
+    'description': {
+        'type': 'string',
+        'coerce': 'markdown',
+    },
+    '_description_html': {'type': 'string'},
     # Short summary for the project
     'summary': {
         'type': 'string',
@@ -598,8 +551,6 @@ projects_schema = {
     'picture_square': _file_embedded_schema,
     # Header
     'picture_header': _file_embedded_schema,
-    # Picture with a 16:9 aspect ratio (for Open Graph)
-    'picture_16_9': _file_embedded_schema,
     'header_node': dict(
         nullable=True,
         **_node_embedded_schema
@@ -882,9 +833,4 @@ UPSERT_ON_PUT = False  # do not create new document on PUT of non-existant URL.
 X_DOMAINS = '*'
 X_ALLOW_CREDENTIALS = True
 X_HEADERS = 'Authorization'
-RENDERERS = ['eve.render.JSONRenderer']
-
-# TODO(Sybren): this is a quick workaround to make /p/{url}/jstree work again.
-# Apparently Eve is now stricter in checking against MONGO_QUERY_BLACKLIST, and
-# blocks our use of $regex.
-MONGO_QUERY_BLACKLIST = ['$where']
+XML = False
@@ -5,7 +5,6 @@ import mimetypes
 import os
 import pathlib
 import tempfile
-import time
 import typing
 import uuid
 from hashlib import md5
@@ -131,67 +130,6 @@ def _process_image(bucket: Bucket,
     src_file['status'] = 'complete'
 
 
-def _video_duration_seconds(filename: pathlib.Path) -> typing.Optional[int]:
-    """Get the duration of a video file using ffprobe
-    https://superuser.com/questions/650291/how-to-get-video-duration-in-seconds
-
-    :param filename: file path to video
-    :return: video duration in seconds
-    """
-    import subprocess
-
-    def run(cli_args):
-        if log.isEnabledFor(logging.INFO):
-            import shlex
-            cmd = ' '.join(shlex.quote(s) for s in cli_args)
-            log.info('Calling %s', cmd)
-
-        ffprobe = subprocess.run(
-            cli_args,
-            stdin=subprocess.DEVNULL,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            timeout=10,  # seconds
-        )
-
-        if ffprobe.returncode:
-            import shlex
-            cmd = ' '.join(shlex.quote(s) for s in cli_args)
-            log.error('Error running %s: stopped with return code %i',
-                      cmd, ffprobe.returncode)
-            log.error('Output was: %s', ffprobe.stdout)
-            return None
-
-        try:
-            return int(float(ffprobe.stdout))
-        except ValueError as e:
-            log.exception('ffprobe produced invalid number: %s', ffprobe.stdout)
-            return None
-
-    ffprobe_from_container_args = [
-        current_app.config['BIN_FFPROBE'],
-        '-v', 'error',
-        '-show_entries', 'format=duration',
-        '-of', 'default=noprint_wrappers=1:nokey=1',
-        str(filename),
-    ]
-
-    ffprobe_from_stream_args = [
-        current_app.config['BIN_FFPROBE'],
-        '-v', 'error',
-        '-hide_banner',
-        '-select_streams', 'v:0',  # we only care about the first video stream
-        '-show_entries', 'stream=duration',
-        '-of', 'default=noprint_wrappers=1:nokey=1',
-        str(filename),
-    ]
-
-    duration = run(ffprobe_from_stream_args) or \
-               run(ffprobe_from_container_args) or \
-               None
-    return duration
-
-
 def _video_size_pixels(filename: pathlib.Path) -> typing.Tuple[int, int]:
     """Figures out the size (in pixels) of the video file.
 
@@ -282,10 +220,8 @@ def _process_video(gcs,
     # by determining the video size here we already have this information in the file
     # document before Zencoder calls our notification URL. It also opens up possibilities
     # for other encoding backends that don't support this functionality.
-    video_path = pathlib.Path(local_file.name)
-    video_width, video_height = _video_size_pixels(video_path)
+    video_width, video_height = _video_size_pixels(pathlib.Path(local_file.name))
     capped_video_width, capped_video_height = _video_cap_at_1080(video_width, video_height)
-    video_duration = _video_duration_seconds(video_path)
 
     # Create variations
     root, _ = os.path.splitext(src_file['file_path'])
@@ -298,13 +234,12 @@ def _process_video(gcs,
             content_type='video/{}'.format(v),
             file_path='{}-{}.{}'.format(root, v, v),
             size='',
+            duration=0,
             width=capped_video_width,
             height=capped_video_height,
             length=0,
             md5='',
         )
-    if video_duration:
-        file_variation['duration'] = video_duration
     # Append file variation. Originally mp4 and webm were the available options,
     # that's why we build a list.
     src_file['variations'].append(file_variation)
@@ -470,7 +405,7 @@ def before_returning_files(response):
         ensure_valid_link(item)
 
 
-def ensure_valid_link(response: dict) -> None:
+def ensure_valid_link(response):
     """Ensures the file item has valid file links using generate_link(...)."""
 
     # Log to function-specific logger, so we can easily turn it off.
@@ -495,13 +430,12 @@ def ensure_valid_link(response: dict) -> None:
     generate_all_links(response, now)
 
 
-def generate_all_links(response: dict, now: datetime.datetime) -> None:
+def generate_all_links(response, now):
     """Generate a new link for the file and all its variations.
 
     :param response: the file document that should be updated.
     :param now: datetime that reflects 'now', for consistent expiry generation.
     """
-    assert isinstance(response, dict), f'response must be dict, is {response!r}'
 
     project_id = str(
         response['project']) if 'project' in response else None
@@ -566,10 +500,13 @@ def on_pre_get_files(_, lookup):
     lookup_expired = lookup.copy()
     lookup_expired['link_expires'] = {'$lte': now}
 
-    cursor, _ = current_app.data.find('files', parsed_req, lookup_expired, perform_count=False)
-    for idx, file_doc in enumerate(cursor):
-        if idx == 0:
-            log.debug('Updating expired links for files that matched lookup %s', lookup_expired)
+    cursor = current_app.data.find('files', parsed_req, lookup_expired)
+    if cursor.count() == 0:
+        return
+
+    log.debug('Updating expired links for %d files that matched lookup %s',
+              cursor.count(), lookup_expired)
+    for file_doc in cursor:
         # log.debug('Updating expired links for file %r.', file_doc['_id'])
         generate_all_links(file_doc, now)
 
@@ -593,21 +530,21 @@ def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
         'link_expires': {'$lt': expire_before},
     }).sort([('link_expires', pymongo.ASCENDING)]).limit(chunk_size)
 
-    refresh_count = 0
+    if to_refresh.count() == 0:
+        log.info('No links to refresh.')
+        return
+
     for file_doc in to_refresh:
         log.debug('Refreshing links for file %s', file_doc['_id'])
         generate_all_links(file_doc, now)
-        refresh_count += 1
 
-    if refresh_count:
-        log.info('Refreshed %i links', refresh_count)
+    log.info('Refreshed %i links', min(chunk_size, to_refresh.count()))
 
 
 def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
     import gcloud.exceptions
 
     my_log = log.getChild(f'refresh_links_for_backend.{backend_name}')
-    start_time = time.time()
 
     # Retrieve expired links.
     files_collection = current_app.data.driver.db['files']
@@ -618,27 +555,23 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
     my_log.info('Limiting to links that expire before %s', expire_before)
 
     base_query = {'backend': backend_name, '_deleted': {'$ne': True}}
-    to_refresh_query = {
-        '$or': [{'link_expires': None, **base_query},
-                {'link_expires': {'$lt': expire_before}, **base_query},
-                {'link': None, **base_query}]
-    }
+    to_refresh = files_collection.find(
+        {'$or': [{'link_expires': None, **base_query},
+                 {'link_expires': {'$lt': expire_before}, **base_query},
+                 {'link': None, **base_query}]
+         }).sort([('link_expires', pymongo.ASCENDING)]).limit(
+        chunk_size).batch_size(5)
 
-    document_count = files_collection.count_documents(to_refresh_query)
+    document_count = to_refresh.count()
     if document_count == 0:
         my_log.info('No links to refresh.')
         return
 
     if 0 < chunk_size == document_count:
-        my_log.info('Found %d documents to refresh, probably limited by the chunk size %d',
-                    document_count, chunk_size)
+        my_log.info('Found %d documents to refresh, probably limited by the chunk size.',
+                    document_count)
     else:
-        my_log.info('Found %d documents to refresh, chunk size=%d', document_count, chunk_size)
+        my_log.info('Found %d documents to refresh.', document_count)
 
-    to_refresh = files_collection.find(to_refresh_query)\
-        .sort([('link_expires', pymongo.ASCENDING)])\
-        .limit(chunk_size)\
-        .batch_size(5)
-
     refreshed = 0
     report_chunks = min(max(5, document_count // 25), 100)
@@ -650,7 +583,7 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
             my_log.debug('Skipping file %s, it has no project.', file_id)
             continue
 
-        count = proj_coll.count_documents({'_id': project_id, '$or': [
+        count = proj_coll.count({'_id': project_id, '$or': [
             {'_deleted': {'$exists': False}},
             {'_deleted': False},
         ]})
@@ -682,10 +615,8 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
                         'links', refreshed)
             return
 
-    if refreshed % report_chunks != 0:
-        my_log.info('Refreshed %i links', refreshed)
+    my_log.info('Refreshed %i links', refreshed)
 
-    my_log.info('Refresh took %s', datetime.timedelta(seconds=time.time() - start_time))
 
 @require_login()
 def create_file_doc(name, filename, content_type, length, project,
@@ -821,10 +752,6 @@ def stream_to_storage(project_id: str):
     local_file = uploaded_file.stream
 
     result = upload_and_process(local_file, uploaded_file, project_id)
-
-    # Local processing is done, we can close the local file so it is removed.
-    local_file.close()
-
     resp = jsonify(result)
     resp.status_code = result['status_code']
     add_access_control_headers(resp)
@@ -833,9 +760,7 @@ def stream_to_storage(project_id: str):
 
 def upload_and_process(local_file: typing.Union[io.BytesIO, typing.BinaryIO],
                        uploaded_file: werkzeug.datastructures.FileStorage,
-                       project_id: str,
-                       *,
-                       may_process_file=True) -> dict:
+                       project_id: str):
     # Figure out the file size, as we need to pass this in explicitly to GCloud.
     # Otherwise it always uses os.fstat(file_obj.fileno()).st_size, which isn't
     # supported by a BytesIO object (even though it does have a fileno
@@ -862,16 +787,19 @@ def upload_and_process(local_file: typing.Union[io.BytesIO, typing.BinaryIO],
               'size=%i as "queued_for_processing"',
               file_id, internal_fname, file_size)
     update_file_doc(file_id,
-                    status='queued_for_processing' if may_process_file else 'complete',
+                    status='queued_for_processing',
                     file_path=internal_fname,
                     length=blob.size,
                     content_type=uploaded_file.mimetype)
 
-    if may_process_file:
-        log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
-                  internal_fname, blob.size)
-        process_file(bucket, file_id, local_file)
+    log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
+              internal_fname, blob.size)
+    process_file(bucket, file_id, local_file)
+
+    # Local processing is done, we can close the local file so it is removed.
+    if local_file is not None:
+        local_file.close()
 
     log.debug('Handled uploaded file id=%s, fname=%s, size=%i, status=%i',
               file_id, internal_fname, blob.size, status)
 
@@ -984,50 +912,7 @@ def compute_aggregate_length_items(file_docs):
         compute_aggregate_length(file_doc)
 
 
-def get_file_url(file_id: ObjectId, variation='') -> str:
-    """Return the URL of a file in storage.
-
-    Note that this function is cached, see setup_app().
-
-    :param file_id: the ID of the file
-    :param variation: if non-empty, indicates the variation of of the file
-        to return the URL for; if empty, returns the URL of the original.
-
-    :return: the URL, or an empty string if the file/variation does not exist.
-    """
-
-    file_coll = current_app.db('files')
-    db_file = file_coll.find_one({'_id': file_id})
-    if not db_file:
-        return ''
-
-    ensure_valid_link(db_file)
-
-    if variation:
-        variations = file_doc.get('variations', ())
-        for file_var in variations:
-            if file_var['size'] == variation:
-                return file_var['link']
-        return ''
-
-    return db_file['link']
-
-
-def update_file_doc(file_id, **updates):
-    files = current_app.data.driver.db['files']
-    res = files.update_one({'_id': ObjectId(file_id)},
-                           {'$set': updates})
-    log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
-              file_id, updates, res.matched_count, res.modified_count)
-    return res
-
-
 def setup_app(app, url_prefix):
-    global get_file_url
-
-    cached = app.cache.memoize(timeout=10)
-    get_file_url = cached(get_file_url)
-
     app.on_pre_GET_files += on_pre_get_files
 
     app.on_fetched_item_files += before_returning_file
|
|||||||
app.on_insert_files += compute_aggregate_length_items
|
app.on_insert_files += compute_aggregate_length_items
|
||||||
|
|
||||||
app.register_api_blueprint(file_storage, url_prefix=url_prefix)
|
app.register_api_blueprint(file_storage, url_prefix=url_prefix)
|
||||||
|
|
||||||
|
|
||||||
|
def update_file_doc(file_id, **updates):
|
||||||
|
files = current_app.data.driver.db['files']
|
||||||
|
res = files.update_one({'_id': ObjectId(file_id)},
|
||||||
|
{'$set': updates})
|
||||||
|
log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
|
||||||
|
file_id, updates, res.matched_count, res.modified_count)
|
||||||
|
return res
|
||||||
|
@@ -90,11 +90,12 @@ class Blob(metaclass=abc.ABCMeta):
 
     def __init__(self, name: str, bucket: Bucket) -> None:
         self.name = name
-        """Name of this blob in the bucket."""
-
         self.bucket = bucket
         self._size_in_bytes: typing.Optional[int] = None
 
+        self.filename: str = None
+        """Name of the file for the Content-Disposition header when downloading it."""
+
         self._log = logging.getLogger(f'{__name__}.Blob')
 
     def __repr__(self):
@@ -132,19 +133,12 @@ class Blob(metaclass=abc.ABCMeta):
                                   file_size=file_size)
 
     @abc.abstractmethod
-    def update_filename(self, filename: str, *, is_attachment=True):
+    def update_filename(self, filename: str):
         """Sets the filename which is used when downloading the file.
 
         Not all storage backends support this, and will use the on-disk filename instead.
         """
 
-    @abc.abstractmethod
-    def update_content_type(self, content_type: str, content_encoding: str = ''):
-        """Set the content type (and optionally content encoding).
-
-        Not all storage backends support this.
-        """
-
     @abc.abstractmethod
     def get_url(self, *, is_public: bool) -> str:
         """Returns the URL to access this blob.
@@ -174,7 +174,7 @@ class GoogleCloudStorageBlob(Blob):
         self.gblob.reload()
         self._size_in_bytes = self.gblob.size
 
-    def update_filename(self, filename: str, *, is_attachment=True):
+    def update_filename(self, filename: str):
         """Set the ContentDisposition metadata so that when a file is downloaded
         it has a human-readable name.
         """
@@ -182,17 +182,7 @@ class GoogleCloudStorageBlob(Blob):
         if '"' in filename:
             raise ValueError(f'Filename is not allowed to have double quote in it: {filename!r}')
 
-        if is_attachment:
-            self.gblob.content_disposition = f'attachment; filename="{filename}"'
-        else:
-            self.gblob.content_disposition = f'filename="{filename}"'
-        self.gblob.patch()
-
-    def update_content_type(self, content_type: str, content_encoding: str = ''):
-        """Set the content type (and optionally content encoding)."""
-
-        self.gblob.content_type = content_type
-        self.gblob.content_encoding = content_encoding
+        self.gblob.content_disposition = f'attachment; filename="{filename}"'
         self.gblob.patch()
 
     def get_url(self, *, is_public: bool) -> str:
@@ -113,13 +113,10 @@ class LocalBlob(Blob):
 
         self._size_in_bytes = file_size
 
-    def update_filename(self, filename: str, *, is_attachment=True):
+    def update_filename(self, filename: str):
         # TODO: implement this for local storage.
         self._log.info('update_filename(%r) not supported', filename)
 
-    def update_content_type(self, content_type: str, content_encoding: str = ''):
-        self._log.info('update_content_type(%r, %r) not supported', content_type, content_encoding)
-
     def make_public(self):
         # No-op on this storage backend.
         pass
@@ -29,6 +29,7 @@ def latest_nodes(db_filter, projection, limit):
     proj = {
         '_created': 1,
         '_updated': 1,
+        'user.full_name': 1,
         'project._id': 1,
         'project.url': 1,
         'project.name': 1,
@@ -69,7 +70,6 @@ def latest_assets():
                           {'name': 1, 'node_type': 1,
                            'parent': 1, 'picture': 1, 'properties.status': 1,
                            'properties.content_type': 1,
-                           'properties.duration_seconds': 1,
                            'permissions.world': 1},
                           12)
 
@@ -80,7 +80,7 @@ def latest_assets():
 def latest_comments():
     latest = latest_nodes({'node_type': 'comment',
                            'properties.status': 'published'},
-                          {'parent': 1, 'user.full_name': 1,
+                          {'parent': 1,
                            'properties.content': 1, 'node_type': 1,
                            'properties.status': 1,
                            'properties.is_reply': 1},
@@ -94,10 +94,17 @@ def generate_and_store_token(user_id, days=15, prefix=b'') -> dict:
 
     # Use 'xy' as altargs to prevent + and / characters from appearing.
     # We never have to b64decode the string anyway.
-    token = prefix + base64.b64encode(random_bits, altchars=b'xy').strip(b'=')
+    token_bytes = prefix + base64.b64encode(random_bits, altchars=b'xy').strip(b'=')
+    token = token_bytes.decode('ascii')
 
     token_expiry = utcnow() + datetime.timedelta(days=days)
-    return store_token(user_id, token.decode('ascii'), token_expiry)
+    token_data = store_token(user_id, token, token_expiry)
+
+    # Include the token in the returned document so that it can be stored client-side,
+    # in configuration, etc.
+    token_data['token'] = token
+
+    return token_data
 
 
 def hash_password(password: str, salt: typing.Union[str, bytes]) -> str:
@@ -11,17 +11,26 @@ ATTACHMENT_SLUG_REGEX = r'[a-zA-Z0-9_\-]+'
 
 attachments_embedded_schema = {
     'type': 'dict',
-    'keysrules': {
+    # TODO: will be renamed to 'keyschema' in Cerberus 1.0
+    'propertyschema': {
         'type': 'string',
         'regex': '^%s$' % ATTACHMENT_SLUG_REGEX,
     },
-    'valuesrules': {
+    'valueschema': {
         'type': 'dict',
         'schema': {
             'oid': {
                 'type': 'objectid',
                 'required': True,
             },
+            'link': {
+                'type': 'string',
+                'allowed': ['self', 'none', 'custom'],
+                'default': 'self',
+            },
+            'link_custom': {
+                'type': 'string',
+            },
             'collection': {
                 'type': 'string',
                 'allowed': ['files'],
@@ -24,10 +24,6 @@ node_type_asset = {
         'content_type': {
             'type': 'string'
         },
-        # The duration of a video asset in seconds.
-        'duration_seconds': {
-            'type': 'integer'
-        },
         # We point to the original file (and use it to extract any relevant
         # variation useful for our scope).
         'file': _file_embedded_schema,
@@ -62,7 +58,6 @@ node_type_asset = {
     },
     'form_schema': {
         'content_type': {'visible': False},
-        'duration_seconds': {'visible': False},
         'order': {'visible': False},
         'tags': {'visible': False},
         'categories': {'visible': False},
@@ -1,15 +1,15 @@
-from pillar.api.node_types import attachments_embedded_schema
-from pillar.api.node_types.utils import markdown_fields
-
 node_type_comment = {
     'name': 'comment',
     'description': 'Comments for asset nodes, pages, etc.',
     'dyn_schema': {
         # The actual comment content
-        **markdown_fields(
-            'content',
-            minlength=5,
-            required=True),
+        'content': {
+            'type': 'string',
+            'minlength': 5,
+            'required': True,
+            'coerce': 'markdown',
+        },
+        '_content_html': {'type': 'string'},
         'status': {
             'type': 'string',
             'allowed': [
@@ -51,8 +51,7 @@ node_type_comment = {
             }
         },
         'confidence': {'type': 'float'},
-        'is_reply': {'type': 'boolean'},
-        'attachments': attachments_embedded_schema,
+        'is_reply': {'type': 'boolean'}
     },
     'form_schema': {},
     'parent': ['asset', 'comment'],
@@ -3,7 +3,7 @@ node_type_group = {
     'description': 'Folder node type',
     'parent': ['group', 'project'],
     'dyn_schema': {
+        # Used for sorting within the context of a group
         'order': {
             'type': 'integer'
         },
@@ -20,8 +20,7 @@ node_type_group = {
         'notes': {
             'type': 'string',
             'maxlength': 256,
-        }
-
+        },
     },
     'form_schema': {
         'url': {'visible': False},
@@ -1,14 +1,17 @@
 from pillar.api.node_types import attachments_embedded_schema
-from pillar.api.node_types.utils import markdown_fields
 
 node_type_post = {
     'name': 'post',
     'description': 'A blog post, for any project',
     'dyn_schema': {
-        **markdown_fields('content',
-                          minlength=5,
-                          maxlength=90000,
-                          required=True),
+        'content': {
+            'type': 'string',
+            'minlength': 5,
+            'maxlength': 90000,
+            'required': True,
+            'coerce': 'markdown',
+        },
+        '_content_html': {'type': 'string'},
         'status': {
             'type': 'string',
             'allowed': [
@@ -1,34 +0,0 @@
-from pillar import markdown
-
-
-def markdown_fields(field: str, **kwargs) -> dict:
-    """
-    Creates a field for the markdown, and a field for the cached html.
-
-    Example usage:
-    schema = {'myDoc': {
-        'type': 'list',
-        'schema': {
-            'type': 'dict',
-            'schema': {
-                **markdown_fields('content', required=True),
-            }
-        },
-    }}
-
-    :param field:
-    :return:
-    """
-    cache_field = markdown.cache_field_name(field)
-    return {
-        field: {
-            'type': 'string',
-            **kwargs
-        },
-        cache_field: {
-            'type': 'string',
-            'readonly': True,
-            'default': field,  # Name of the field containing the markdown. Will be input to the coerce function.
-            'coerce': 'markdown',
-        }
-    }
@@ -1,19 +1,56 @@
 import base64
-import datetime
+import functools
 import logging
+import urllib.parse
 
 import pymongo.errors
 import werkzeug.exceptions as wz_exceptions
+from bson import ObjectId
 from flask import current_app, Blueprint, request
 
-from pillar.api.nodes import eve_hooks, comments, activities
+from pillar.api.activities import activity_subscribe, activity_object_add
+from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
+from pillar.api.file_storage_backends.gcs import update_file_name
 from pillar.api.utils import str2id, jsonify
 from pillar.api.utils.authorization import check_permissions, require_login
-from pillar.web.utils import pretty_date
 
 log = logging.getLogger(__name__)
 blueprint = Blueprint('nodes_api', __name__)
-ROLES_FOR_SHARING = ROLES_FOR_COMMENTING = {'subscriber', 'demo'}
+ROLES_FOR_SHARING = {'subscriber', 'demo'}
+
+
+def only_for_node_type_decorator(*required_node_type_names):
+    """Returns a decorator that checks its first argument's node type.
+
+    If the node type is not of the required node type, returns None,
+    otherwise calls the wrapped function.
+
+    >>> deco = only_for_node_type_decorator('comment')
+    >>> @deco
+    ... def handle_comment(node): pass
+
+    >>> deco = only_for_node_type_decorator('comment', 'post')
+    >>> @deco
+    ... def handle_comment_or_post(node): pass
+
+    """
+
+    # Convert to a set for efficient 'x in required_node_type_names' queries.
+    required_node_type_names = set(required_node_type_names)
+
+    def only_for_node_type(wrapped):
+        @functools.wraps(wrapped)
+        def wrapper(node, *args, **kwargs):
+            if node.get('node_type') not in required_node_type_names:
+                return
+
+            return wrapped(node, *args, **kwargs)
+
+        return wrapper
+
+    only_for_node_type.__doc__ = "Decorator, immediately returns when " \
+                                 "the first argument is not of type %s." % required_node_type_names
+    return only_for_node_type
 
 
 @blueprint.route('/<node_id>/share', methods=['GET', 'POST'])
@@ -48,121 +85,7 @@ def share_node(node_id):
     else:
         return '', 204
 
-    return jsonify(eve_hooks.short_link_info(short_code), status=status)
+    return jsonify(short_link_info(short_code), status=status)
 
 
-@blueprint.route('/<string(length=24):node_path>/comments', methods=['GET'])
-def get_node_comments(node_path: str):
-    node_id = str2id(node_path)
-    return comments.get_node_comments(node_id)
-
-
-@blueprint.route('/<string(length=24):node_path>/comments', methods=['POST'])
-@require_login(require_roles=ROLES_FOR_COMMENTING)
-def post_node_comment(node_path: str):
-    node_id = str2id(node_path)
-    msg = request.json['msg']
-    attachments = request.json.get('attachments', {})
-    return comments.post_node_comment(node_id, msg, attachments)
-
-
-@blueprint.route('/<string(length=24):node_path>/comments/<string(length=24):comment_path>', methods=['PATCH'])
-@require_login(require_roles=ROLES_FOR_COMMENTING)
-def patch_node_comment(node_path: str, comment_path: str):
-    node_id = str2id(node_path)
-    comment_id = str2id(comment_path)
-    msg = request.json['msg']
-    attachments = request.json.get('attachments', {})
-    return comments.patch_node_comment(node_id, comment_id, msg, attachments)
-
-
-@blueprint.route('/<string(length=24):node_path>/comments/<string(length=24):comment_path>/vote', methods=['POST'])
-@require_login(require_roles=ROLES_FOR_COMMENTING)
-def post_node_comment_vote(node_path: str, comment_path: str):
-    node_id = str2id(node_path)
-    comment_id = str2id(comment_path)
-    vote_str = request.json['vote']
-    vote = int(vote_str)
-    return comments.post_node_comment_vote(node_id, comment_id, vote)
-
-
-@blueprint.route('/<string(length=24):node_path>/activities', methods=['GET'])
-def activities_for_node(node_path: str):
-    node_id = str2id(node_path)
-    return jsonify(activities.for_node(node_id))
-
-
-@blueprint.route('/tagged/')
-@blueprint.route('/tagged/<tag>')
-def tagged(tag=''):
-    """Return all tagged nodes of public projects as JSON."""
-    from pillar.auth import current_user
-
-    # We explicitly register the tagless endpoint to raise a 404, otherwise the PATCH
-    # handler on /api/nodes/<node_id> will return a 405 Method Not Allowed.
-    if not tag:
-        raise wz_exceptions.NotFound()
-
-    # Build the (cached) list of tagged nodes
-    agg_list = _tagged(tag)
-
-    for node in agg_list:
-        if node['properties'].get('duration_seconds'):
-            node['properties']['duration'] = datetime.timedelta(seconds=node['properties']['duration_seconds'])
-
-        if node.get('_created') is not None:
-            node['pretty_created'] = pretty_date(node['_created'])
-
-    # If the user is anonymous, no more information is needed and we return
-    if current_user.is_anonymous:
-        return jsonify(agg_list)
-
-    # If the user is authenticated, attach view_progress for video assets
-    view_progress = current_user.nodes['view_progress']
-    for node in agg_list:
-        node_id = str(node['_id'])
-        # View progress should be added only for nodes of type 'asset' and
-        # with content_type 'video', only if the video was already in the watched
-        # list for the current user.
-        if node_id in view_progress:
-            node['view_progress'] = view_progress[node_id]
-
-    return jsonify(agg_list)
-
-
-def _tagged(tag: str):
-    """Fetch all public nodes with the given tag.
-
-    This function is cached, see setup_app().
-    """
-    nodes_coll = current_app.db('nodes')
-    agg = nodes_coll.aggregate([
-        {'$match': {'properties.tags': tag,
-                    '_deleted': {'$ne': True}}},
-
-        # Only get nodes from public projects. This is done after matching the
-        # tagged nodes, because most likely nobody else will be able to tag
-        # nodes anyway.
-        {'$lookup': {
-            'from': 'projects',
-            'localField': 'project',
-            'foreignField': '_id',
-            'as': '_project',
-        }},
-        {'$unwind': '$_project'},
-        {'$match': {'_project.is_private': False}},
-        {'$addFields': {
-            'project._id': '$_project._id',
-            'project.name': '$_project.name',
-            'project.url': '$_project.url',
-        }},
-
-        # Don't return the entire project/file for each node.
-        {'$project': {'_project': False}},
-        {'$sort': {'_created': -1}}
-    ])
-
-    return list(agg)
-
-
 def generate_and_store_short_code(node):
@ -240,35 +163,265 @@ def create_short_code(node) -> str:
|
|||||||
return short_code
|
return short_code
|
||||||
|
|
||||||
|
|
||||||
|
def short_link_info(short_code):
|
||||||
|
"""Returns the short link info in a dict."""
|
||||||
|
|
||||||
|
short_link = urllib.parse.urljoin(
|
||||||
|
current_app.config['SHORT_LINK_BASE_URL'], short_code)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'short_code': short_code,
|
||||||
|
'short_link': short_link,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def before_replacing_node(item, original):
|
||||||
|
check_permissions('nodes', original, 'PUT')
|
||||||
|
update_file_name(item)
|
||||||
|
|
||||||
|
|
||||||
|
def after_replacing_node(item, original):
|
||||||
|
"""Push an update to the Algolia index when a node item is updated. If the
|
||||||
|
project is private, prevent public indexing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pillar.celery import search_index_tasks as index
|
||||||
|
|
||||||
|
projects_collection = current_app.data.driver.db['projects']
|
||||||
|
project = projects_collection.find_one({'_id': item['project']})
|
||||||
|
if project.get('is_private', False):
|
||||||
|
# Skip index updating and return
|
||||||
|
return
|
||||||
|
|
||||||
|
status = item['properties'].get('status', 'unpublished')
|
||||||
|
node_id = str(item['_id'])
|
||||||
|
|
||||||
|
if status == 'published':
|
||||||
|
index.node_save.delay(node_id)
|
||||||
|
else:
|
||||||
|
index.node_delete.delay(node_id)
|
||||||
|
|
||||||
|
|
||||||
|
def before_inserting_nodes(items):
|
||||||
|
"""Before inserting a node in the collection we check if the user is allowed
|
||||||
|
and we append the project id to it.
|
||||||
|
"""
|
||||||
|
from pillar.auth import current_user
|
||||||
|
|
||||||
|
nodes_collection = current_app.data.driver.db['nodes']
|
||||||
|
|
||||||
|
def find_parent_project(node):
|
||||||
|
"""Recursive function that finds the ultimate parent of a node."""
|
||||||
|
if node and 'parent' in node:
|
||||||
|
parent = nodes_collection.find_one({'_id': node['parent']})
|
||||||
|
return find_parent_project(parent)
|
||||||
|
if node:
|
||||||
|
return node
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
for item in items:
|
||||||
|
check_permissions('nodes', item, 'POST')
|
||||||
|
if 'parent' in item and 'project' not in item:
|
||||||
|
parent = nodes_collection.find_one({'_id': item['parent']})
|
||||||
|
project = find_parent_project(parent)
|
||||||
|
if project:
|
||||||
|
item['project'] = project['_id']
|
||||||
|
|
||||||
|
# Default the 'user' property to the current user.
|
||||||
|
item.setdefault('user', current_user.user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def after_inserting_nodes(items):
|
||||||
|
for item in items:
|
||||||
|
# Skip subscriptions for first level items (since the context is not a
|
||||||
|
# node, but a project).
|
||||||
|
# TODO: support should be added for mixed context
|
||||||
|
if 'parent' not in item:
|
||||||
|
return
|
||||||
|
context_object_id = item['parent']
|
||||||
|
if item['node_type'] == 'comment':
|
||||||
|
nodes_collection = current_app.data.driver.db['nodes']
|
||||||
|
parent = nodes_collection.find_one({'_id': item['parent']})
|
||||||
|
# Always subscribe to the parent node
|
||||||
|
activity_subscribe(item['user'], 'node', item['parent'])
|
||||||
|
if parent['node_type'] == 'comment':
|
||||||
|
# If the parent is a comment, we provide its own parent as
|
||||||
|
# context. We do this in order to point the user to an asset
|
||||||
|
# or group when viewing the notification.
|
||||||
|
verb = 'replied'
|
||||||
|
context_object_id = parent['parent']
|
||||||
|
# Subscribe to the parent of the parent comment (post or group)
|
||||||
|
activity_subscribe(item['user'], 'node', parent['parent'])
|
||||||
|
else:
|
||||||
|
activity_subscribe(item['user'], 'node', item['_id'])
|
||||||
|
verb = 'commented'
|
||||||
|
elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
|
||||||
|
verb = 'posted'
|
||||||
|
activity_subscribe(item['user'], 'node', item['_id'])
|
||||||
|
else:
|
||||||
|
# Don't automatically create activities for non-Pillar node types,
|
||||||
|
# as we don't know what would be a suitable verb (among other things).
|
||||||
|
continue
|
||||||
|
|
||||||
|
activity_object_add(
|
||||||
|
item['user'],
|
||||||
|
verb,
|
||||||
|
'node',
|
||||||
|
item['_id'],
|
||||||
|
'node',
|
||||||
|
context_object_id
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def deduct_content_type(node_doc, original=None):
|
||||||
|
"""Deduct the content type from the attached file, if any."""
|
||||||
|
|
||||||
|
if node_doc['node_type'] != 'asset':
|
||||||
|
+        log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
+        return
+
+    node_id = node_doc.get('_id')
+    try:
+        file_id = ObjectId(node_doc['properties']['file'])
+    except KeyError:
+        if node_id is None:
+            # Creation of a file-less node is allowed, but updates aren't.
+            return
+        log.warning('deduct_content_type: Asset without properties.file, rejecting.')
+        raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')
+
+    files = current_app.data.driver.db['files']
+    file_doc = files.find_one({'_id': file_id},
+                              {'content_type': 1})
+    if not file_doc:
+        log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
+                    node_id, file_id)
+        raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')
+
+    # Guess the node content type from the file content type
+    file_type = file_doc['content_type']
+    if file_type.startswith('video/'):
+        content_type = 'video'
+    elif file_type.startswith('image/'):
+        content_type = 'image'
+    else:
+        content_type = 'file'
+
+    node_doc['properties']['content_type'] = content_type
+
+
+def nodes_deduct_content_type(nodes):
+    for node in nodes:
+        deduct_content_type(node)
+
+
+def before_returning_node(node):
+    # Run validation process, since GET on nodes entry point is public
+    check_permissions('nodes', node, 'GET', append_allowed_methods=True)
+
+    # Embed short_link_info if the node has a short_code.
+    short_code = node.get('short_code')
+    if short_code:
+        node['short_link'] = short_link_info(short_code)['short_link']
+
+
+def before_returning_nodes(nodes):
+    for node in nodes['_items']:
+        before_returning_node(node)
+
+
+def node_set_default_picture(node, original=None):
+    """Uses the image of an image asset or colour map of texture node as picture."""
+
+    if node.get('picture'):
+        log.debug('Node %s already has a picture, not overriding', node.get('_id'))
+        return
+
+    node_type = node.get('node_type')
+    props = node.get('properties', {})
+    content = props.get('content_type')
+
+    if node_type == 'asset' and content == 'image':
+        image_file_id = props.get('file')
+    elif node_type == 'texture':
+        # Find the colour map, defaulting to the first image map available.
+        image_file_id = None
+        for image in props.get('files', []):
+            if image_file_id is None or image.get('map_type') == 'color':
+                image_file_id = image.get('file')
+    else:
+        log.debug('Not setting default picture on node type %s content type %s',
+                  node_type, content)
+        return
+
+    if image_file_id is None:
+        log.debug('Nothing to set the picture to.')
+        return
+
+    log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
+    node['picture'] = image_file_id
+
+
+def nodes_set_default_picture(nodes):
+    for node in nodes:
+        node_set_default_picture(node)
+
+
+def before_deleting_node(node: dict):
+    check_permissions('nodes', node, 'DELETE')
+
+
+def after_deleting_node(item):
+    from pillar.celery import search_index_tasks as index
+    index.node_delete.delay(str(item['_id']))
+
+
+only_for_textures = only_for_node_type_decorator('texture')
+
+
+@only_for_textures
+def texture_sort_files(node, original=None):
+    """Sort files alphabetically by map type, with colour map first."""
+
+    try:
+        files = node['properties']['files']
+    except KeyError:
+        return
+
+    # Sort the map types alphabetically, ensuring 'color' comes first.
+    as_dict = {f['map_type']: f for f in files}
+    types = sorted(as_dict.keys(), key=lambda k: '\0' if k == 'color' else k)
+    node['properties']['files'] = [as_dict[map_type] for map_type in types]
+
+
+def textures_sort_files(nodes):
+    for node in nodes:
+        texture_sort_files(node)
+
+
 def setup_app(app, url_prefix):
-    global _tagged
-
-    cached = app.cache.memoize(timeout=300)
-    _tagged = cached(_tagged)
-
     from . import patch
     patch.setup_app(app, url_prefix=url_prefix)
 
-    app.on_fetched_item_nodes += eve_hooks.before_returning_node
-    app.on_fetched_resource_nodes += eve_hooks.before_returning_nodes
+    app.on_fetched_item_nodes += before_returning_node
+    app.on_fetched_resource_nodes += before_returning_nodes
 
-    app.on_replace_nodes += eve_hooks.before_replacing_node
-    app.on_replace_nodes += eve_hooks.texture_sort_files
-    app.on_replace_nodes += eve_hooks.deduct_content_type_and_duration
-    app.on_replace_nodes += eve_hooks.node_set_default_picture
-    app.on_replaced_nodes += eve_hooks.after_replacing_node
+    app.on_replace_nodes += before_replacing_node
+    app.on_replace_nodes += texture_sort_files
+    app.on_replace_nodes += deduct_content_type
+    app.on_replace_nodes += node_set_default_picture
+    app.on_replaced_nodes += after_replacing_node
 
-    app.on_insert_nodes += eve_hooks.before_inserting_nodes
-    app.on_insert_nodes += eve_hooks.nodes_deduct_content_type_and_duration
-    app.on_insert_nodes += eve_hooks.nodes_set_default_picture
-    app.on_insert_nodes += eve_hooks.textures_sort_files
-    app.on_inserted_nodes += eve_hooks.after_inserting_nodes
+    app.on_insert_nodes += before_inserting_nodes
+    app.on_insert_nodes += nodes_deduct_content_type
+    app.on_insert_nodes += nodes_set_default_picture
+    app.on_insert_nodes += textures_sort_files
+    app.on_inserted_nodes += after_inserting_nodes
 
-    app.on_update_nodes += eve_hooks.texture_sort_files
+    app.on_update_nodes += texture_sort_files
 
-    app.on_delete_item_nodes += eve_hooks.before_deleting_node
-    app.on_deleted_item_nodes += eve_hooks.after_deleting_node
+    app.on_delete_item_nodes += before_deleting_node
+    app.on_deleted_item_nodes += after_deleting_node
 
     app.register_api_blueprint(blueprint, url_prefix=url_prefix)
-
-    activities.setup_app(app)
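
Both sides of this hunk wire handlers through Eve's event-hook mechanism: a hook is a plain callable appended to a per-resource event with `+=`, and Eve calls every registered handler in order. A minimal, self-contained sketch of that mechanism (the inline settings dict and the handler body are illustrative, not taken from this diff):

```
from eve import Eve

# Minimal in-memory settings; the real app builds its DOMAIN from node types.
app = Eve(settings={'DOMAIN': {'nodes': {}}})

def before_returning_node(node):
    # Handlers mutate the document in place; Eve serialises the result.
    node.setdefault('short_link', None)

# += appends to an event list; multiple handlers run in registration order.
app.on_fetched_item_nodes += before_returning_node
```
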
@@ -1,43 +0,0 @@
from eve.methods import get

import pillar.api.users.avatar


def for_node(node_id):
    activities, _, _, status, _ =\
        get('activities',
            {
                '$or': [
                    {'object_type': 'node',
                     'object': node_id},
                    {'context_object_type': 'node',
                     'context_object': node_id},
                ],
            },)

    for act in activities['_items']:
        act['actor_user'] = _user_info(act['actor_user'])

    return activities


def _user_info(user_id):
    users, _, _, status, _ = get('users', {'_id': user_id})
    if len(users['_items']) > 0:
        user = users['_items'][0]
        user['avatar'] = pillar.api.users.avatar.url(user)

        public_fields = {'full_name', 'username', 'avatar'}
        for field in list(user.keys()):
            if field not in public_fields:
                del user[field]

        return user
    return {}


def setup_app(app):
    global _user_info

    decorator = app.cache.memoize(timeout=300, make_name='%s.public_user_info' % __name__)
    _user_info = decorator(_user_info)
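
The `setup_app()` at the end of this deleted module uses a rebind-at-setup pattern: the module-level `_user_info` is replaced by a memoized wrapper once the app (and its cache) exists. A standalone sketch of the same pattern, using the stdlib instead of Flask-Caching so it runs anywhere:

```
import functools

def _user_info(user_id):
    print(f'expensive lookup for {user_id}')
    return {'id': user_id}

def setup_app():
    # Rebinding the module-level name means every caller that looks the
    # function up at call time gets the cached version from now on.
    global _user_info
    _user_info = functools.lru_cache(maxsize=128)(_user_info)

setup_app()
_user_info(1)  # performs the lookup
_user_info(1)  # served from the cache, no print
```
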
@@ -1,302 +0,0 @@
import logging
from datetime import datetime

import pymongo
import typing

import bson
import attr
import werkzeug.exceptions as wz_exceptions

import pillar
from pillar import current_app, shortcodes
import pillar.api.users.avatar
from pillar.api.nodes.custom.comment import patch_comment
from pillar.api.utils import jsonify
from pillar.auth import current_user
import pillar.markdown

log = logging.getLogger(__name__)


@attr.s(auto_attribs=True)
class UserDO:
    id: str
    full_name: str
    avatar_url: str
    badges_html: str


@attr.s(auto_attribs=True)
class CommentPropertiesDO:
    attachments: typing.Dict
    rating_positive: int = 0
    rating_negative: int = 0


@attr.s(auto_attribs=True)
class CommentDO:
    id: bson.ObjectId
    parent: bson.ObjectId
    project: bson.ObjectId
    user: UserDO
    msg_html: str
    msg_markdown: str
    properties: CommentPropertiesDO
    created: datetime
    updated: datetime
    etag: str
    replies: typing.List['CommentDO'] = []
    current_user_rating: typing.Optional[bool] = None


@attr.s(auto_attribs=True)
class CommentTreeDO:
    node_id: bson.ObjectId
    project: bson.ObjectId
    nbr_of_comments: int = 0
    comments: typing.List[CommentDO] = []


def _get_markdowned_html(document: dict, field_name: str) -> str:
    cache_field_name = pillar.markdown.cache_field_name(field_name)
    html = document.get(cache_field_name)
    if html is None:
        markdown_src = document.get(field_name) or ''
        html = pillar.markdown.markdown(markdown_src)
    return html


def jsonify_data_object(data_object: attr):
    return jsonify(
        attr.asdict(data_object,
                    recurse=True)
    )


class CommentTreeBuilder:
    def __init__(self, node_id: bson.ObjectId):
        self.node_id = node_id
        self.nbr_of_Comments: int = 0

    def build(self) -> CommentTreeDO:
        enriched_comments = self.child_comments(
            self.node_id,
            sort={'properties.rating_positive': pymongo.DESCENDING,
                  '_created': pymongo.DESCENDING})
        project_id = self.get_project_id()
        return CommentTreeDO(
            node_id=self.node_id,
            project=project_id,
            nbr_of_comments=self.nbr_of_Comments,
            comments=enriched_comments
        )

    def child_comments(self, node_id: bson.ObjectId, sort: dict) -> typing.List[CommentDO]:
        raw_comments = self.mongodb_comments(node_id, sort)
        return [self.enrich(comment) for comment in raw_comments]

    def enrich(self, mongo_comment: dict) -> CommentDO:
        self.nbr_of_Comments += 1
        comment = to_comment_data_object(mongo_comment)
        comment.replies = self.child_comments(mongo_comment['_id'],
                                              sort={'_created': pymongo.ASCENDING})
        return comment

    def get_project_id(self):
        nodes_coll = current_app.db('nodes')
        result = nodes_coll.find_one({'_id': self.node_id})
        return result['project']

    @classmethod
    def mongodb_comments(cls, node_id: bson.ObjectId, sort: dict) -> typing.Iterator:
        nodes_coll = current_app.db('nodes')
        return nodes_coll.aggregate([
            {'$match': {'node_type': 'comment',
                        '_deleted': {'$ne': True},
                        'properties.status': 'published',
                        'parent': node_id}},
            {'$lookup': {"from": "users",
                         "localField": "user",
                         "foreignField": "_id",
                         "as": "user"}},
            {'$unwind': {'path': "$user"}},
            {'$sort': sort},
        ])


def get_node_comments(node_id: bson.ObjectId):
    comments_tree = CommentTreeBuilder(node_id).build()
    return jsonify_data_object(comments_tree)


def post_node_comment(parent_id: bson.ObjectId, markdown_msg: str, attachments: dict):
    parent_node = find_node_or_raise(parent_id,
                                     'User %s tried to update comment with bad parent_id %s',
                                     current_user.objectid,
                                     parent_id)

    is_reply = parent_node['node_type'] == 'comment'
    comment = dict(
        parent=parent_id,
        project=parent_node['project'],
        name='Comment',
        user=current_user.objectid,
        node_type='comment',
        properties=dict(
            content=markdown_msg,
            status='published',
            is_reply=is_reply,
            confidence=0,
            rating_positive=0,
            rating_negative=0,
            attachments=attachments,
        ),
        permissions=dict(
            users=[dict(
                user=current_user.objectid,
                methods=['PUT'])
            ]
        )
    )
    r, _, _, status = current_app.post_internal('nodes', comment)

    if status != 201:
        log.warning('Unable to post comment on %s as %s: %s',
                    parent_id, current_user.objectid, r)
        raise wz_exceptions.InternalServerError('Unable to create comment')

    comment_do = get_comment(parent_id, r['_id'])

    return jsonify_data_object(comment_do), 201


def find_node_or_raise(node_id, *args):
    nodes_coll = current_app.db('nodes')
    node_to_comment = nodes_coll.find_one({
        '_id': node_id,
        '_deleted': {'$ne': True},
    })
    if not node_to_comment:
        log.warning(args)
        raise wz_exceptions.UnprocessableEntity()
    return node_to_comment


def patch_node_comment(parent_id: bson.ObjectId,
                       comment_id: bson.ObjectId,
                       markdown_msg: str,
                       attachments: dict):
    _, _ = find_parent_and_comment_or_raise(parent_id, comment_id)

    patch = dict(
        op='edit',
        content=markdown_msg,
        attachments=attachments
    )

    json_result = patch_comment(comment_id, patch)
    if json_result.json['result'] != 200:
        raise wz_exceptions.InternalServerError('Failed to update comment')

    comment_do = get_comment(parent_id, comment_id)

    return jsonify_data_object(comment_do), 200


def find_parent_and_comment_or_raise(parent_id, comment_id):
    parent = find_node_or_raise(parent_id,
                                'User %s tried to update comment with bad parent_id %s',
                                current_user.objectid,
                                parent_id)
    comment = find_node_or_raise(comment_id,
                                 'User %s tried to update comment with bad id %s',
                                 current_user.objectid,
                                 comment_id)
    validate_comment_parent_relation(comment, parent)
    return parent, comment


def validate_comment_parent_relation(comment, parent):
    if comment['parent'] != parent['_id']:
        log.warning('User %s tried to update comment with bad parent/comment pair.'
                    ' parent_id: %s comment_id: %s',
                    current_user.objectid, parent['_id'], comment['_id'])
        raise wz_exceptions.BadRequest()


def get_comment(parent_id: bson.ObjectId, comment_id: bson.ObjectId) -> CommentDO:
    nodes_coll = current_app.db('nodes')
    mongo_comment = list(nodes_coll.aggregate([
        {'$match': {'node_type': 'comment',
                    '_deleted': {'$ne': True},
                    'properties.status': 'published',
                    'parent': parent_id,
                    '_id': comment_id}},
        {'$lookup': {"from": "users",
                     "localField": "user",
                     "foreignField": "_id",
                     "as": "user"}},
        {'$unwind': {'path': "$user"}},
    ]))[0]

    return to_comment_data_object(mongo_comment)


def to_comment_data_object(mongo_comment: dict) -> CommentDO:
    def current_user_rating():
        if current_user.is_authenticated:
            for rating in mongo_comment['properties'].get('ratings', ()):
                if str(rating['user']) != current_user.objectid:
                    continue
                return rating['is_positive']
        return None

    user_dict = mongo_comment['user']
    user = UserDO(
        id=str(mongo_comment['user']['_id']),
        full_name=user_dict['full_name'],
        avatar_url=pillar.api.users.avatar.url(user_dict),
        badges_html=user_dict.get('badges', {}).get('html', '')
    )
    html = _get_markdowned_html(mongo_comment['properties'], 'content')
    html = shortcodes.render_commented(html, context=mongo_comment['properties'])
    return CommentDO(
        id=mongo_comment['_id'],
        parent=mongo_comment['parent'],
        project=mongo_comment['project'],
        user=user,
        msg_html=html,
        msg_markdown=mongo_comment['properties']['content'],
        current_user_rating=current_user_rating(),
        created=mongo_comment['_created'],
        updated=mongo_comment['_updated'],
        etag=mongo_comment['_etag'],
        properties=CommentPropertiesDO(
            attachments=mongo_comment['properties'].get('attachments', {}),
            rating_positive=mongo_comment['properties']['rating_positive'],
            rating_negative=mongo_comment['properties']['rating_negative']
        )
    )


def post_node_comment_vote(parent_id: bson.ObjectId, comment_id: bson.ObjectId, vote: int):
    normalized_vote = min(max(vote, -1), 1)
    _, _ = find_parent_and_comment_or_raise(parent_id, comment_id)

    actions = {
        1: 'upvote',
        0: 'revoke',
        -1: 'downvote',
    }

    patch = dict(
        op=actions[normalized_vote]
    )

    json_result = patch_comment(comment_id, patch)
    if json_result.json['_status'] != 'OK':
        raise wz_exceptions.InternalServerError('Failed to vote on comment')

    comment_do = get_comment(parent_id, comment_id)
    return jsonify_data_object(comment_do), 200
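
The deleted comments module returns nested `attr` data objects and serialises them with `attr.asdict(..., recurse=True)`. A trimmed-down sketch (only `id`/`replies` kept) showing why that one call is enough to produce a JSON-ready dict tree:

```
import typing
import attr

@attr.s(auto_attribs=True)
class CommentDO:
    id: str
    replies: typing.List['CommentDO'] = []

@attr.s(auto_attribs=True)
class CommentTreeDO:
    node_id: str
    comments: typing.List[CommentDO] = []

tree = CommentTreeDO('n1', [CommentDO('c1', replies=[CommentDO('c2')])])
# recurse=True converts the nested attrs instances as well.
print(attr.asdict(tree, recurse=True))
# {'node_id': 'n1', 'comments': [{'id': 'c1', 'replies': [{'id': 'c2', 'replies': []}]}]}
```
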
@@ -5,7 +5,7 @@ import logging
 from flask import current_app
 import werkzeug.exceptions as wz_exceptions
 
-from pillar.api.utils import authorization, authentication, jsonify, remove_private_keys
+from pillar.api.utils import authorization, authentication, jsonify
 
 from . import register_patch_handler
@@ -135,7 +135,10 @@ def edit_comment(user_id, node_id, patch):
     # we can pass this stuff to Eve's patch_internal; that way the validation &
     # authorisation system has enough info to work.
     nodes_coll = current_app.data.driver.db['nodes']
-    node = nodes_coll.find_one(node_id)
+    projection = {'user': 1,
+                  'project': 1,
+                  'node_type': 1}
+    node = nodes_coll.find_one(node_id, projection=projection)
     if node is None:
         log.warning('User %s wanted to patch non-existing node %s' % (user_id, node_id))
         raise wz_exceptions.NotFound('Node %s not found' % node_id)
@@ -143,12 +146,12 @@ def edit_comment(user_id, node_id, patch):
     if node['user'] != user_id and not authorization.user_has_role('admin'):
         raise wz_exceptions.Forbidden('You can only edit your own comments.')
 
-    node = remove_private_keys(node)
-    node['properties']['content'] = patch['content']
-    node['properties']['attachments'] = patch.get('attachments', {})
-    # Use Eve to PUT this node, as that also updates the etag and we want to replace attachments.
-    r, _, _, status = current_app.put_internal('nodes',
-                                               node,
+    # Use Eve to PATCH this node, as that also updates the etag.
+    r, _, _, status = current_app.patch_internal('nodes',
+                                                 {'properties.content': patch['content'],
+                                                  'project': node['project'],
+                                                  'user': node['user'],
+                                                  'node_type': node['node_type']},
                                                  concurrency_check=False,
                                                  _id=node_id)
     if status != 200:
@@ -1,336 +0,0 @@
import collections
import functools
import logging
import urllib.parse

from bson import ObjectId
from werkzeug import exceptions as wz_exceptions

from pillar import current_app
from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.file_storage_backends.gcs import update_file_name
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
from pillar.api.utils import random_etag
from pillar.api.utils.authorization import check_permissions

log = logging.getLogger(__name__)


def before_returning_node(node):
    # Run validation process, since GET on nodes entry point is public
    check_permissions('nodes', node, 'GET', append_allowed_methods=True)

    # Embed short_link_info if the node has a short_code.
    short_code = node.get('short_code')
    if short_code:
        node['short_link'] = short_link_info(short_code)['short_link']


def before_returning_nodes(nodes):
    for node in nodes['_items']:
        before_returning_node(node)


def only_for_node_type_decorator(*required_node_type_names):
    """Returns a decorator that checks its first argument's node type.

    If the node type is not of the required node type, returns None,
    otherwise calls the wrapped function.

    >>> deco = only_for_node_type_decorator('comment')
    >>> @deco
    ... def handle_comment(node): pass

    >>> deco = only_for_node_type_decorator('comment', 'post')
    >>> @deco
    ... def handle_comment_or_post(node): pass

    """

    # Convert to a set for efficient 'x in required_node_type_names' queries.
    required_node_type_names = set(required_node_type_names)

    def only_for_node_type(wrapped):
        @functools.wraps(wrapped)
        def wrapper(node, *args, **kwargs):
            if node.get('node_type') not in required_node_type_names:
                return

            return wrapped(node, *args, **kwargs)

        return wrapper

    only_for_node_type.__doc__ = "Decorator, immediately returns when " \
                                 "the first argument is not of type %s." % required_node_type_names
    return only_for_node_type


def before_replacing_node(item, original):
    check_permissions('nodes', original, 'PUT')
    update_file_name(item)


def after_replacing_node(item, original):
    """Push an update to the Algolia index when a node item is updated. If the
    project is private, prevent public indexing.
    """
    from pillar.celery import search_index_tasks as index

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': item['project']})
    if project.get('is_private', False):
        # Skip index updating and return
        return

    status = item['properties'].get('status', 'unpublished')
    node_id = str(item['_id'])

    if status == 'published':
        index.node_save.delay(node_id)
    else:
        index.node_delete.delay(node_id)


def before_inserting_nodes(items):
    """Before inserting a node in the collection we check if the user is allowed
    and we append the project id to it.
    """
    from pillar.auth import current_user

    nodes_collection = current_app.data.driver.db['nodes']

    def find_parent_project(node):
        """Recursive function that finds the ultimate parent of a node."""
        if node and 'parent' in node:
            parent = nodes_collection.find_one({'_id': node['parent']})
            return find_parent_project(parent)
        if node:
            return node
        else:
            return None

    for item in items:
        check_permissions('nodes', item, 'POST')
        if 'parent' in item and 'project' not in item:
            parent = nodes_collection.find_one({'_id': item['parent']})
            project = find_parent_project(parent)
            if project:
                item['project'] = project['_id']

        # Default the 'user' property to the current user.
        item.setdefault('user', current_user.user_id)


def get_comment_verb_and_context_object_id(comment):
    nodes_collection = current_app.data.driver.db['nodes']
    verb = 'commented'
    parent = nodes_collection.find_one({'_id': comment['parent']})
    context_object_id = comment['parent']
    while parent['node_type'] == 'comment':
        # If the parent is a comment, we provide its own parent as
        # context. We do this in order to point the user to an asset
        # or group when viewing the notification.
        verb = 'replied'
        context_object_id = parent['parent']
        parent = nodes_collection.find_one({'_id': parent['parent']})
    return verb, context_object_id


def after_inserting_nodes(items):
    for item in items:
        context_object_id = None
        # TODO: support should be added for mixed context
        if item['node_type'] in PILLAR_NAMED_NODE_TYPES:
            activity_subscribe(item['user'], 'node', item['_id'])
            verb = 'posted'
            context_object_id = item.get('parent')
            if item['node_type'] == 'comment':
                # Always subscribe to the parent node
                activity_subscribe(item['user'], 'node', item['parent'])
                verb, context_object_id = get_comment_verb_and_context_object_id(item)
                # Subscribe to the parent of the parent comment (post or group)
                activity_subscribe(item['user'], 'node', context_object_id)

        if context_object_id and item['node_type'] in PILLAR_NAMED_NODE_TYPES:
            # * Skip activity for first level items (since the context is not a
            #   node, but a project).
            # * Don't automatically create activities for non-Pillar node types,
            #   as we don't know what would be a suitable verb (among other things).
            activity_object_add(
                item['user'],
                verb,
                'node',
                item['_id'],
                'node',
                context_object_id
            )


def deduct_content_type_and_duration(node_doc, original=None):
    """Deduct the content type from the attached file, if any."""

    if node_doc['node_type'] != 'asset':
        log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
        return

    node_id = node_doc.get('_id')
    try:
        file_id = ObjectId(node_doc['properties']['file'])
    except KeyError:
        if node_id is None:
            # Creation of a file-less node is allowed, but updates aren't.
            return
        log.warning('deduct_content_type: Asset without properties.file, rejecting.')
        raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')

    files = current_app.data.driver.db['files']
    file_doc = files.find_one({'_id': file_id},
                              {'content_type': 1,
                               'variations': 1})
    if not file_doc:
        log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
                    node_id, file_id)
        raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')

    # Guess the node content type from the file content type
    file_type = file_doc['content_type']
    if file_type.startswith('video/'):
        content_type = 'video'
    elif file_type.startswith('image/'):
        content_type = 'image'
    else:
        content_type = 'file'

    node_doc['properties']['content_type'] = content_type

    if content_type == 'video':
        duration = file_doc['variations'][0].get('duration')
        if duration:
            node_doc['properties']['duration_seconds'] = duration
        else:
            log.warning('Video file %s has no duration', file_id)


def nodes_deduct_content_type_and_duration(nodes):
    for node in nodes:
        deduct_content_type_and_duration(node)


def node_set_default_picture(node, original=None):
    """Uses the image of an image asset or colour map of texture node as picture."""

    if node.get('picture'):
        log.debug('Node %s already has a picture, not overriding', node.get('_id'))
        return

    node_type = node.get('node_type')
    props = node.get('properties', {})
    content = props.get('content_type')

    if node_type == 'asset' and content == 'image':
        image_file_id = props.get('file')
    elif node_type == 'texture':
        # Find the colour map, defaulting to the first image map available.
        image_file_id = None
        for image in props.get('files', []):
            if image_file_id is None or image.get('map_type') == 'color':
                image_file_id = image.get('file')
    else:
        log.debug('Not setting default picture on node type %s content type %s',
                  node_type, content)
        return

    if image_file_id is None:
        log.debug('Nothing to set the picture to.')
        return

    log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
    node['picture'] = image_file_id


def nodes_set_default_picture(nodes):
    for node in nodes:
        node_set_default_picture(node)


def before_deleting_node(node: dict):
    check_permissions('nodes', node, 'DELETE')
    remove_project_references(node)


def remove_project_references(node):
    project_id = node.get('project')
    if not project_id:
        return

    node_id = node['_id']
    log.info('Removing references to node %s from project %s', node_id, project_id)

    projects_col = current_app.db('projects')
    project = projects_col.find_one({'_id': project_id})
    updates = collections.defaultdict(dict)

    if project.get('header_node') == node_id:
        updates['$unset']['header_node'] = node_id

    project_reference_lists = ('nodes_blog', 'nodes_featured', 'nodes_latest')
    for list_name in project_reference_lists:
        references = project.get(list_name)
        if not references:
            continue
        try:
            references.remove(node_id)
        except ValueError:
            continue

        updates['$set'][list_name] = references

    if not updates:
        return

    updates['$set']['_etag'] = random_etag()
    result = projects_col.update_one({'_id': project_id}, updates)
    if result.modified_count != 1:
        log.warning('Removing references to node %s from project %s resulted in %d modified documents (expected 1)',
                    node_id, project_id, result.modified_count)


def after_deleting_node(item):
    from pillar.celery import search_index_tasks as index
    index.node_delete.delay(str(item['_id']))


only_for_textures = only_for_node_type_decorator('texture')


@only_for_textures
def texture_sort_files(node, original=None):
    """Sort files alphabetically by map type, with colour map first."""

    try:
        files = node['properties']['files']
    except KeyError:
        return

    # Sort the map types alphabetically, ensuring 'color' comes first.
    as_dict = {f['map_type']: f for f in files}
    types = sorted(as_dict.keys(), key=lambda k: '\0' if k == 'color' else k)
    node['properties']['files'] = [as_dict[map_type] for map_type in types]


def textures_sort_files(nodes):
    for node in nodes:
        texture_sort_files(node)


def short_link_info(short_code):
    """Returns the short link info in a dict."""

    short_link = urllib.parse.urljoin(
        current_app.config['SHORT_LINK_BASE_URL'], short_code)

    return {
        'short_code': short_code,
        'short_link': short_link,
    }
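
`texture_sort_files()` (present on both sides of this compare) forces the colour map to the front by mapping `'color'` to `'\0'`, which sorts before any printable character. A quick standalone check:

```
types = ['specular', 'color', 'bump', 'normal']
print(sorted(types, key=lambda k: '\0' if k == 'color' else k))
# ['color', 'bump', 'normal', 'specular']
```
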
@@ -1,7 +1,7 @@
 """Code for moving around nodes."""
 
 import attr
-import pymongo.database
+import flask_pymongo.wrappers
 from bson import ObjectId
 
 from pillar import attrs_extra
@@ -10,7 +10,7 @@ import pillar.api.file_storage.moving
 
 @attr.s
 class NodeMover(object):
-    db = attr.ib(validator=attr.validators.instance_of(pymongo.database.Database))
+    db = attr.ib(validator=attr.validators.instance_of(flask_pymongo.wrappers.Database))
     skip_gcs = attr.ib(default=False, validator=attr.validators.instance_of(bool))
     _log = attrs_extra.log('%s.NodeMover' % __name__)
@@ -153,7 +153,7 @@ class OrgManager:
         org_coll = current_app.db('organizations')
         users_coll = current_app.db('users')
 
-        if users_coll.count_documents({'_id': user_id}) == 0:
+        if users_coll.count({'_id': user_id}) == 0:
             raise ValueError('User not found')
 
         self._log.info('Updating organization %s, setting admin user to %s', org_id, user_id)
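
This hunk (and the similar ones that follow) swap `count_documents()` back to the long-deprecated `Collection.count()`. `count_documents()` is the PyMongo 3.7+ API and takes the same filter document. A sketch under stated assumptions (a reachable MongoDB; the connection and user id below are placeholders):

```
from pymongo import MongoClient

users_coll = MongoClient().test_db.users  # placeholder connection/database

if users_coll.count_documents({'_id': 'some-user-id'}) == 0:
    raise ValueError('User not found')
```
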
@@ -189,7 +189,7 @@ class OrgManager:
         if user_doc is not None:
             user_id = user_doc['_id']
 
-        if user_id and not users_coll.count_documents({'_id': user_id}):
+        if user_id and not users_coll.count({'_id': user_id}):
             raise wz_exceptions.UnprocessableEntity('User does not exist')
 
         self._log.info('Removing user %s / %s from organization %s', user_id, email, org_id)
@@ -374,7 +374,7 @@ class OrgManager:
         member_ids = [str2id(uid) for uid in member_sting_ids]
         users_coll = current_app.db('users')
         users = users_coll.find({'_id': {'$in': member_ids}},
-                                projection={'_id': 1, 'full_name': 1, 'email': 1, 'avatar': 1})
+                                projection={'_id': 1, 'full_name': 1, 'email': 1})
         return list(users)
 
     def user_has_organizations(self, user_id: bson.ObjectId) -> bool:
@@ -385,7 +385,7 @@ class OrgManager:
 
         org_coll = current_app.db('organizations')
 
-        org_count = org_coll.count_documents({'$or': [
+        org_count = org_coll.count({'$or': [
             {'admin_uid': user_id},
             {'members': user_id}
         ]})
@@ -396,7 +396,7 @@ class OrgManager:
         """Return True iff the email is an unknown member of some org."""
 
         org_coll = current_app.db('organizations')
-        org_count = org_coll.count_documents({'unknown_members': member_email})
+        org_count = org_coll.count({'unknown_members': member_email})
         return bool(org_count)
 
     def roles_for_ip_address(self, remote_addr: str) -> typing.Set[str]:
@@ -194,7 +194,7 @@ class OrganizationPatchHandler(patch_handler.AbstractPatchHandler):
         self.log.info('User %s edits Organization %s: %s', current_user_id, org_id, update)
 
         validator = current_app.validator_for_resource('organizations')
-        if not validator.validate_update(update, org_id, persisted_document={}):
+        if not validator.validate_update(update, org_id):
             resp = jsonify({
                 '_errors': validator.errors,
                 '_message': ', '.join(f'{field}: {error}'
@@ -9,7 +9,6 @@ def setup_app(app, api_prefix):
     app.on_replace_projects += hooks.override_is_private_field
     app.on_replace_projects += hooks.before_edit_check_permissions
     app.on_replace_projects += hooks.protect_sensitive_fields
-    app.on_replace_projects += hooks.parse_markdown
 
     app.on_update_projects += hooks.override_is_private_field
     app.on_update_projects += hooks.before_edit_check_permissions
@@ -20,8 +19,6 @@ def setup_app(app, api_prefix):
 
     app.on_insert_projects += hooks.before_inserting_override_is_private_field
     app.on_insert_projects += hooks.before_inserting_projects
-    app.on_insert_projects += hooks.parse_markdowns
 
     app.on_inserted_projects += hooks.after_inserting_projects
 
     app.on_fetched_item_projects += hooks.before_returning_project_permissions
@@ -3,7 +3,6 @@ import logging
 
 from flask import request, abort
 
-import pillar
 from pillar import current_app
 from pillar.api.node_types.asset import node_type_asset
 from pillar.api.node_types.comment import node_type_comment
@@ -72,19 +71,14 @@ def before_delete_project(document):
 
 def after_delete_project(project: dict):
     """Perform delete on the project's files too."""
-    from werkzeug.exceptions import NotFound
     from eve.methods.delete import delete
 
     pid = project['_id']
     log.info('Project %s was deleted, also deleting its files.', pid)
 
-    try:
-        r, _, _, status = delete('files', {'project': pid})
-    except NotFound:
-        # There were no files, and that's fine.
-        return
+    r, _, _, status = delete('files', {'project': pid})
     if status != 204:
-        # Will never happen because bloody Eve always returns 204 or raises an exception.
         log.warning('Unable to delete files of project %s: %s', pid, r)
@@ -247,37 +241,3 @@ def project_node_type_has_method(response):
 def projects_node_type_has_method(response):
     for project in response['_items']:
         project_node_type_has_method(project)
-
-
-def parse_markdown(project, original=None):
-    schema = current_app.config['DOMAIN']['projects']['schema']
-
-    def find_markdown_fields(schema, project):
-        """Find and process all Markdown coerced fields.
-
-        - look for fields with a 'coerce': 'markdown' property
-        - parse the name of the field and generate the sibling field name (_<field_name>_html -> <field_name>)
-        - parse the content of the <field_name> field as markdown and save it in _<field_name>_html
-        """
-        for field_name, field_value in schema.items():
-            if not isinstance(field_value, dict):
-                continue
-            if field_value.get('coerce') != 'markdown':
-                continue
-            if field_name not in project:
-                continue
-
-            # Construct markdown source field name (strip the leading '_' and the trailing '_html')
-            source_field_name = field_name[1:-5]
-            html = pillar.markdown.markdown(project[source_field_name])
-            project[field_name] = html
-
-            if isinstance(project, dict) and field_name in project:
-                find_markdown_fields(field_value, project[field_name])
-
-    find_markdown_fields(schema, project)
-
-
-def parse_markdowns(items):
-    for item in items:
-        parse_markdown(item)
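
The deleted `parse_markdown()` leans on a naming convention: the HTML cache for a source field `summary` lives in a sibling `_summary_html` field, so the source name is recovered by stripping the leading underscore and the trailing `_html`. A quick check (`summary` is an illustrative field name, not one from this diff):

```
field_name = '_summary_html'
source_field_name = field_name[1:-5]  # drop leading '_' and trailing '_html'
print(source_field_name)  # -> 'summary'
```
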
@@ -25,11 +25,8 @@ def merge_project(pid_from: ObjectId, pid_to: ObjectId):
 
     # Move the files first. Since this requires API calls to an external
     # service, this is more likely to go wrong than moving the nodes.
-    query = {'project': pid_from}
-    to_move = files_coll.find(query, projection={'_id': 1})
-
-    to_move_count = files_coll.count_documents(query)
-    log.info('Moving %d files to project %s', to_move_count, pid_to)
+    to_move = files_coll.find({'project': pid_from}, projection={'_id': 1})
+    log.info('Moving %d files to project %s', to_move.count(), pid_to)
     for file_doc in to_move:
         fid = file_doc['_id']
         log.debug('moving file %s to project %s', fid, pid_to)
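
Here the master side avoids `Cursor.count()` (the wip-fronte form), which PyMongo deprecated and later removed: the count is taken from the collection with the same query instead. A sketch of the modern shape (connection and project id are placeholders):

```
from pymongo import MongoClient

files_coll = MongoClient().test_db.files  # placeholder connection/database
query = {'project': 'some-project-id'}    # placeholder project id

to_move = files_coll.find(query, projection={'_id': 1})
to_move_count = files_coll.count_documents(query)  # replaces to_move.count()
```
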
@@ -38,7 +35,7 @@ def merge_project(pid_from: ObjectId, pid_to: ObjectId):
     # Mass-move the nodes.
     etag = random_etag()
     result = nodes_coll.update_many(
-        query,
+        {'project': pid_from},
         {'$set': {'project': pid_to,
                   '_etag': etag,
                   '_updated': utcnow(),
@@ -5,7 +5,6 @@ from bson import ObjectId
 from flask import Blueprint, request, current_app, make_response, url_for
 from werkzeug import exceptions as wz_exceptions
 
-import pillar.api.users.avatar
 from pillar.api.utils import authorization, jsonify, str2id
 from pillar.api.utils import mongo
 from pillar.api.utils.authorization import require_login, check_permissions
@@ -55,13 +54,10 @@ def project_manage_users():
         project = projects_collection.find_one({'_id': ObjectId(project_id)})
         admin_group_id = project['permissions']['groups'][0]['group']
 
-        users = list(users_collection.find(
+        users = users_collection.find(
             {'groups': {'$in': [admin_group_id]}},
-            {'username': 1, 'email': 1, 'full_name': 1, 'avatar': 1}))
-        for user in users:
-            user['avatar_url'] = pillar.api.users.avatar.url(user)
-            user.pop('avatar', None)
-        return jsonify({'_status': 'OK', '_items': users})
+            {'username': 1, 'email': 1, 'full_name': 1})
+        return jsonify({'_status': 'OK', '_items': list(users)})
 
     # The request is not a form, since it comes from the API sdk
     data = json.loads(request.data)
@@ -96,7 +92,7 @@ def project_manage_users():
                     action, current_user_id)
         raise wz_exceptions.UnprocessableEntity()
 
-    users_collection.update_one({'_id': target_user_id},
+    users_collection.update({'_id': target_user_id},
                                 {operation: {'groups': admin_group['_id']}})
 
     user = users_collection.find_one({'_id': target_user_id},
@@ -145,3 +141,5 @@ def get_allowed_methods(project_id=None, node_type=None):
     resp.status_code = 204
 
     return resp
+
+
@@ -7,7 +7,6 @@ from werkzeug.exceptions import abort
 
 from pillar import current_app
 from pillar.auth import current_user
-from pillar.api import file_storage_backends
 
 log = logging.getLogger(__name__)
@@ -156,18 +155,6 @@ def project_id(project_url: str) -> ObjectId:
     return proj['_id']
 
 
-def get_project_url(project_id: ObjectId) -> str:
-    """Returns the project URL, or raises a ValueError when not found."""
-
-    proj_coll = current_app.db('projects')
-    proj = proj_coll.find_one({'_id': project_id, '_deleted': {'$ne': True}},
-                              projection={'url': True})
-
-    if not proj:
-        raise ValueError(f'project with id={project_id} not found')
-    return proj['url']
-
-
 def get_project(project_url: str) -> dict:
     """Find a project in the database, raises ValueError if not found.
@@ -198,17 +185,5 @@ def put_project(project: dict):
     result, _, _, status_code = current_app.put_internal('projects', proj_no_none, _id=pid)
 
     if status_code != 200:
-        message = f"Can't update project {pid}, status {status_code} with issues: {result}"
-        log.error(message)
-        raise ValueError(message)
-
-
-def storage(project_id: ObjectId) -> file_storage_backends.Bucket:
-    """Return the storage bucket for this project.
-
-    For now this returns a bucket in the default storage backend, since
-    individual projects do not have a 'storage backend' setting (this is
-    set per file, not per project).
-    """
-
-    return file_storage_backends.default_storage_backend(str(project_id))
+        raise ValueError(f"Can't update project {pid}, "
+                         f"status {status_code} with issues: {result}")
@@ -81,7 +81,6 @@ class Node(es.DocType):
         fields={
             'id': es.Keyword(),
             'name': es.Keyword(),
-            'url': es.Keyword(),
         }
     )
@@ -154,21 +153,18 @@ def create_doc_from_node_data(node_to_index: dict) -> typing.Optional[Node]:
     doc.objectID = str(node_to_index['objectID'])
     doc.node_type = node_to_index['node_type']
     doc.name = node_to_index['name']
-    doc.description = node_to_index.get('description')
     doc.user.id = str(node_to_index['user']['_id'])
     doc.user.name = node_to_index['user']['full_name']
     doc.project.id = str(node_to_index['project']['_id'])
     doc.project.name = node_to_index['project']['name']
-    doc.project.url = node_to_index['project']['url']
 
     if node_to_index['node_type'] == 'asset':
         doc.media = node_to_index['media']
 
-    doc.picture = str(node_to_index.get('picture'))
+    doc.picture = node_to_index.get('picture')
 
     doc.tags = node_to_index.get('tags')
     doc.license_notes = node_to_index.get('license_notes')
-    doc.is_free = node_to_index.get('is_free')
 
     doc.created_at = node_to_index['created']
     doc.updated_at = node_to_index['updated']
@@ -3,18 +3,16 @@ import logging
 import typing
 
 from elasticsearch import Elasticsearch
-from elasticsearch_dsl import Search, Q, MultiSearch
+from elasticsearch_dsl import Search, Q
 from elasticsearch_dsl.query import Query
 
 from pillar import current_app
 
 log = logging.getLogger(__name__)
 
-BOOLEAN_TERMS = ['is_free']
-NODE_AGG_TERMS = ['node_type', 'media', 'tags', *BOOLEAN_TERMS]
+NODE_AGG_TERMS = ['node_type', 'media', 'tags', 'is_free']
 USER_AGG_TERMS = ['roles', ]
 ITEMS_PER_PAGE = 10
-USER_SOURCE_INCLUDE = ['full_name', 'objectID', 'username']
 
 # Will be set in setup_app()
 client: Elasticsearch = None
@@ -29,25 +27,26 @@ def add_aggs_to_search(search, agg_terms):
         search.aggs.bucket(term, 'terms', field=term)
 
 
-def make_filter(must: list, terms: dict) -> list:
+def make_must(must: list, terms: dict) -> list:
     """ Given term parameters append must queries to the must list """
 
     for field, value in terms.items():
-        if value not in (None, ''):
-            must.append({'term': {field: value}})
+        if value:
+            must.append({'match': {field: value}})
 
     return must
 
 
-def nested_bool(filters: list, should: list, terms: dict, *, index_alias: str) -> Search:
+def nested_bool(must: list, should: list, terms: dict, *, index_alias: str) -> Search:
     """
     Create a nested bool, where the aggregation selection is a must.
 
     :param index_alias: 'USER' or 'NODE', see ELASTIC_INDICES config.
     """
-    filters = make_filter(filters, terms)
+    must = make_must(must, terms)
     bool_query = Q('bool', should=should)
-    bool_query = Q('bool', must=bool_query, filter=filters)
+    must.append(bool_query)
+    bool_query = Q('bool', must=must)
 
     index = current_app.config['ELASTIC_INDICES'][index_alias]
     search = Search(using=client, index=index)
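
The master side of this hunk puts term refinements in the bool query's `filter` context (exact, unscored `term` clauses) rather than scored `must`/`match` clauses. A sketch of the query shape this produces with elasticsearch_dsl (the field values below are made up):

```
from elasticsearch_dsl import Q

should = [Q('match', name='tree')]          # scored full-text clause
filters = [{'term': {'is_free': True}}]     # exact, non-scoring refinement

bool_query = Q('bool', should=should)
bool_query = Q('bool', must=bool_query, filter=filters)
print(bool_query.to_dict())  # the JSON body that would be sent to Elasticsearch
```
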
@@ -56,34 +55,12 @@ def nested_bool(filters: list, should: list, terms: dict, *, index_alias: str) -> Search:
     return search
 
 
-def do_multi_node_search(queries: typing.List[dict]) -> typing.List[dict]:
-    """
-    Given user query input and term refinements
-    search for public published nodes
-    """
-    search = create_multi_node_search(queries)
-    return _execute_multi(search)
-
-
 def do_node_search(query: str, terms: dict, page: int, project_id: str='') -> dict:
     """
     Given user query input and term refinements
     search for public published nodes
     """
-    search = create_node_search(query, terms, page, project_id)
-    return _execute(search)
-
-
-def create_multi_node_search(queries: typing.List[dict]) -> MultiSearch:
-    search = MultiSearch(using=client)
-    for q in queries:
-        search = search.add(create_node_search(**q))
-
-    return search
-
-
-def create_node_search(query: str, terms: dict, page: int, project_id: str='') -> Search:
-    terms = _transform_terms(terms)
     should = [
         Q('match', name=query),
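
The functions deleted here split building a `Search` from executing it so that several node searches can be batched into one round trip. A minimal sketch of the `MultiSearch` usage pattern (the client and index name are assumptions, not taken from this diff):

```
from elasticsearch import Elasticsearch
from elasticsearch_dsl import MultiSearch, Search

client = Elasticsearch()                    # assumes a reachable cluster
ms = MultiSearch(using=client, index='nodes')
ms = ms.add(Search().query('match', name='tree'))
ms = ms.add(Search().query('match', name='rock'))
responses = ms.execute()                    # one HTTP request, one response per sub-search
```
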
@@ -94,30 +71,52 @@ def create_node_search(query: str, terms: dict, page: int, project_id: str='') -> Search:
         Q('term', media=query),
         Q('term', tags=query),
     ]
-    filters = []
+
+    must = []
     if project_id:
-        filters.append({'term': {'project.id': project_id}})
+        must.append({'term': {'project.id': project_id}})
 
     if not query:
         should = []
-    search = nested_bool(filters, should, terms, index_alias='NODE')
+
+    search = nested_bool(must, should, terms, index_alias='NODE')
     if not query:
         search = search.sort('-created_at')
     add_aggs_to_search(search, NODE_AGG_TERMS)
     search = paginate(search, page)
 
     if log.isEnabledFor(logging.DEBUG):
         log.debug(json.dumps(search.to_dict(), indent=4))
-    return search
+
+    response = search.execute()
+
+    if log.isEnabledFor(logging.DEBUG):
+        log.debug(json.dumps(response.to_dict(), indent=4))
+
+    return response.to_dict()
 
 
 def do_user_search(query: str, terms: dict, page: int) -> dict:
     """ return user objects represented in elasticsearch result dict"""
 
-    search = create_user_search(query, terms, page)
-    return _execute(search)
+    must, should = _common_user_search(query)
+    search = nested_bool(must, should, terms, index_alias='USER')
+    add_aggs_to_search(search, USER_AGG_TERMS)
+    search = paginate(search, page)
+
+    if log.isEnabledFor(logging.DEBUG):
+        log.debug(json.dumps(search.to_dict(), indent=4))
+
+    response = search.execute()
+
+    if log.isEnabledFor(logging.DEBUG):
+        log.debug(json.dumps(response.to_dict(), indent=4))
+
+    return response.to_dict()
 
 
 def _common_user_search(query: str) -> (typing.List[Query], typing.List[Query]):
-    """Construct (filter,should) for regular + admin user search."""
+    """Construct (must,should) for regular + admin user search."""
     if not query:
         return [], []
@ -145,31 +144,8 @@ def do_user_search_admin(query: str, terms: dict, page: int) -> dict:
|
|||||||
search all user fields and provide aggregation information
|
search all user fields and provide aggregation information
|
||||||
"""
|
"""
|
||||||
|
|
||||||
search = create_user_admin_search(query, terms, page)
|
must, should = _common_user_search(query)
|
||||||
return _execute(search)
|
|
||||||
|
|
||||||
|
|
||||||
def _execute(search: Search) -> dict:
|
|
||||||
if log.isEnabledFor(logging.DEBUG):
|
|
||||||
log.debug(json.dumps(search.to_dict(), indent=4))
|
|
||||||
resp = search.execute()
|
|
||||||
if log.isEnabledFor(logging.DEBUG):
|
|
||||||
log.debug(json.dumps(resp.to_dict(), indent=4))
|
|
||||||
return resp.to_dict()
|
|
||||||
|
|
||||||
|
|
||||||
def _execute_multi(search: typing.List[Search]) -> typing.List[dict]:
|
|
||||||
if log.isEnabledFor(logging.DEBUG):
|
|
||||||
log.debug(json.dumps(search.to_dict(), indent=4))
|
|
||||||
resp = search.execute()
|
|
||||||
if log.isEnabledFor(logging.DEBUG):
|
|
||||||
log.debug(json.dumps(resp.to_dict(), indent=4))
|
|
||||||
return [r.to_dict() for r in resp]
|
|
||||||
|
|
||||||
|
|
||||||
def create_user_admin_search(query: str, terms: dict, page: int) -> Search:
|
|
||||||
terms = _transform_terms(terms)
|
|
||||||
filters, should = _common_user_search(query)
|
|
||||||
if query:
|
if query:
|
||||||
# We most likely got and id field. we should find it.
|
# We most likely got and id field. we should find it.
|
||||||
if len(query) == len('563aca02c379cf0005e8e17d'):
|
if len(query) == len('563aca02c379cf0005e8e17d'):
|
||||||
@ -179,34 +155,26 @@ def create_user_admin_search(query: str, terms: dict, page: int) -> Search:
|
|||||||
'boost': 100, # how much more it counts for the score
|
'boost': 100, # how much more it counts for the score
|
||||||
}
|
}
|
||||||
}})
|
}})
|
||||||
search = nested_bool(filters, should, terms, index_alias='USER')
|
|
||||||
|
search = nested_bool(must, should, terms, index_alias='USER')
|
||||||
add_aggs_to_search(search, USER_AGG_TERMS)
|
add_aggs_to_search(search, USER_AGG_TERMS)
|
||||||
search = paginate(search, page)
|
search = paginate(search, page)
|
||||||
return search
|
|
||||||
|
|
||||||
|
if log.isEnabledFor(logging.DEBUG):
|
||||||
|
log.debug(json.dumps(search.to_dict(), indent=4))
|
||||||
|
|
||||||
def create_user_search(query: str, terms: dict, page: int) -> Search:
|
response = search.execute()
|
||||||
search = create_user_admin_search(query, terms, page)
|
|
||||||
return search.source(include=USER_SOURCE_INCLUDE)
|
if log.isEnabledFor(logging.DEBUG):
|
||||||
|
log.debug(json.dumps(response.to_dict(), indent=4))
|
||||||
|
|
||||||
|
return response.to_dict()
|
||||||
|
|
||||||
|
|
||||||
def paginate(search: Search, page_idx: int) -> Search:
|
def paginate(search: Search, page_idx: int) -> Search:
|
||||||
return search[page_idx * ITEMS_PER_PAGE:(page_idx + 1) * ITEMS_PER_PAGE]
|
return search[page_idx * ITEMS_PER_PAGE:(page_idx + 1) * ITEMS_PER_PAGE]
|
||||||
|
|
||||||
|
|
||||||
def _transform_terms(terms: dict) -> dict:
|
|
||||||
"""
|
|
||||||
Ugly hack! Elastic uses 1/0 for boolean values in its aggregate response,
|
|
||||||
but expects true/false in queries.
|
|
||||||
"""
|
|
||||||
transformed = terms.copy()
|
|
||||||
for t in BOOLEAN_TERMS:
|
|
||||||
orig = transformed.get(t)
|
|
||||||
if orig in ('1', '0'):
|
|
||||||
transformed[t] = bool(int(orig))
|
|
||||||
return transformed
|
|
||||||
|
|
||||||
|
|
||||||
def setup_app(app):
|
def setup_app(app):
|
||||||
global client
|
global client
|
||||||
|
|
||||||
|
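Note: the `_transform_terms()` helper removed above papers over an Elasticsearch quirk: aggregations echo boolean facet values back as `1`/`0` strings, while queries expect real `true`/`false`. A minimal, self-contained sketch of that normalization; the `BOOLEAN_TERMS` value here is an assumption for illustration, not taken from this diff:

```
# Sketch of the boolean-term normalization done by the removed helper.
# BOOLEAN_TERMS is assumed; in Pillar it lists the boolean facet names.
BOOLEAN_TERMS = ('is_free',)

def transform_terms(terms: dict) -> dict:
    """Convert '1'/'0' facet values (as echoed by aggregations) to bools."""
    transformed = terms.copy()
    for term in BOOLEAN_TERMS:
        value = transformed.get(term)
        if value in ('1', '0'):
            transformed[term] = bool(int(value))
    return transformed

assert transform_terms({'is_free': '1'}) == {'is_free': True}
```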
@@ -18,7 +18,7 @@ TERMS = [
 ]


-def _term_filters(args) -> dict:
+def _term_filters() -> dict:
     """
     Check if frontend wants to filter stuff
     on specific fields AKA facets

@@ -26,52 +26,35 @@ def _term_filters(args) -> dict:
     return mapping with term field name
     and provided user term value
     """
-    return {term: args.get(term, '') for term in TERMS}
+    return {term: request.args.get(term, '') for term in TERMS}


-def _page_index(page) -> int:
+def _page_index() -> int:
     """Return the page index from the query string."""
     try:
-        page_idx = int(page)
+        page_idx = int(request.args.get('page') or '0')
     except TypeError:
         log.info('invalid page number %r received', request.args.get('page'))
         raise wz_exceptions.BadRequest()
     return page_idx


-@blueprint_search.route('/', methods=['GET'])
+@blueprint_search.route('/')
 def search_nodes():
     searchword = request.args.get('q', '')
     project_id = request.args.get('project', '')
-    terms = _term_filters(request.args)
-    page_idx = _page_index(request.args.get('page', 0))
+    terms = _term_filters()
+    page_idx = _page_index()

     result = queries.do_node_search(searchword, terms, page_idx, project_id)
     return jsonify(result)

-@blueprint_search.route('/multisearch', methods=['POST'])
-def multi_search_nodes():
-    if len(request.args) != 1:
-        log.info(f'Expected 1 argument, received {len(request.args)}')
-
-    json_obj = request.json
-    q = []
-    for row in json_obj:
-        q.append({
-            'query': row.get('q', ''),
-            'project_id': row.get('project', ''),
-            'terms': _term_filters(row),
-            'page': _page_index(row.get('page', 0))
-        })
-
-    result = queries.do_multi_node_search(q)
-    return jsonify(result)
-
 @blueprint_search.route('/user')
 def search_user():
     searchword = request.args.get('q', '')
-    terms = _term_filters(request.args)
-    page_idx = _page_index(request.args.get('page', 0))
+    terms = _term_filters()
+    page_idx = _page_index()
     # result is the raw elasticsearch output.
     # we need to filter fields in case of user objects.

@@ -82,6 +65,27 @@ def search_user():
         resp.status_code = 500
         return resp

+    # filter sensitive stuff
+    # we only need objectID, full_name, username
+    hits = result.get('hits', {})
+
+    new_hits = []
+
+    for hit in hits.get('hits'):
+        source = hit['_source']
+        single_hit = {
+            '_source': {
+                'objectID': source.get('objectID'),
+                'username': source.get('username'),
+                'full_name': source.get('full_name'),
+            }
+        }
+
+        new_hits.append(single_hit)
+
+    # replace search result with safe subset
+    result['hits']['hits'] = new_hits
+
     return jsonify(result)


@@ -93,8 +97,8 @@ def search_user_admin():
     """

     searchword = request.args.get('q', '')
-    terms = _term_filters(request.args)
-    page_idx = _page_index(request.args.get('page', 0))
+    terms = _term_filters()
+    page_idx = _page_index()

     try:
         result = queries.do_user_search_admin(searchword, terms, page_idx)
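Note: the removed `/multisearch` endpoint accepts a JSON array of query rows and fans them out into a single Elasticsearch multi-search. A hedged sketch of the request payload it expects, based only on the fields read in `multi_search_nodes()`; the values and the exact URL prefix are assumptions:

```
import json

# Each row carries the same parameters as a single GET to the search root.
payload = [
    {'q': 'spring', 'project': '563aca02c379cf0005e8e17d', 'page': 0},
    {'q': 'rig', 'project': '', 'page': 1},
]
# POSTed as JSON to .../multisearch (the mount point of blueprint_search
# depends on app setup, so treat the full URL as an assumption).
print(json.dumps(payload, indent=2))
```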
@@ -1,374 +0,0 @@
-import itertools
-import typing
-from datetime import datetime
-from operator import itemgetter
-
-import attr
-import bson
-import pymongo
-from flask import Blueprint, current_app, request, url_for
-
-import pillar
-from pillar import shortcodes
-from pillar.api.utils import jsonify, pretty_duration, str2id
-
-blueprint = Blueprint('timeline', __name__)
-
-
-@attr.s(auto_attribs=True)
-class TimelineDO:
-    groups: typing.List['GroupDO'] = []
-    continue_from: typing.Optional[float] = None
-
-
-@attr.s(auto_attribs=True)
-class GroupDO:
-    label: typing.Optional[str] = None
-    url: typing.Optional[str] = None
-    items: typing.Dict = {}
-    groups: typing.Iterable['GroupDO'] = []
-
-
-class SearchHelper:
-    def __init__(self, nbr_of_weeks: int, continue_from: typing.Optional[datetime],
-                 project_ids: typing.List[bson.ObjectId], sort_direction: str):
-        self._nbr_of_weeks = nbr_of_weeks
-        self._continue_from = continue_from
-        self._project_ids = project_ids
-        self.sort_direction = sort_direction
-
-    def _match(self, continue_from: typing.Optional[datetime]) -> dict:
-        created = {}
-        if continue_from:
-            if self.sort_direction == 'desc':
-                created = {'_created': {'$lt': continue_from}}
-            else:
-                created = {'_created': {'$gt': continue_from}}
-        return {'_deleted': {'$ne': True},
-                'node_type': {'$in': ['asset', 'post']},
-                'properties.status': {'$eq': 'published'},
-                'project': {'$in': self._project_ids},
-                **created,
-                }
-
-    def raw_weeks_from_mongo(self) -> pymongo.collection.Collection:
-        direction = pymongo.DESCENDING if self.sort_direction == 'desc' else pymongo.ASCENDING
-        nodes_coll = current_app.db('nodes')
-        return nodes_coll.aggregate([
-            {'$match': self._match(self._continue_from)},
-            {'$lookup': {"from": "projects",
-                         "localField": "project",
-                         "foreignField": "_id",
-                         "as": "project"}},
-            {'$unwind': {'path': "$project"}},
-            {'$lookup': {"from": "users",
-                         "localField": "user",
-                         "foreignField": "_id",
-                         "as": "user"}},
-            {'$unwind': {'path': "$user"}},
-            {'$project': {
-                '_created': 1,
-                'project._id': 1,
-                'project.url': 1,
-                'project.name': 1,
-                'user._id': 1,
-                'user.full_name': 1,
-                'name': 1,
-                'node_type': 1,
-                'picture': 1,
-                'properties': 1,
-                'permissions': 1,
-            }},
-            {'$group': {
-                '_id': {'year': {'$isoWeekYear': '$_created'},
-                        'week': {'$isoWeek': '$_created'}},
-                'nodes': {'$push': '$$ROOT'}
-            }},
-            {'$sort': {'_id.year': direction,
-                       '_id.week': direction}},
-            {'$limit': self._nbr_of_weeks}
-        ])
-
-    def has_more(self, continue_from: datetime) -> bool:
-        nodes_coll = current_app.db('nodes')
-        result = nodes_coll.count_documents(self._match(continue_from))
-        return bool(result)
-
-
-class Grouper:
-    @classmethod
-    def label(cls, node):
-        return None
-
-    @classmethod
-    def url(cls, node):
-        return None
-
-    @classmethod
-    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
-        raise NotImplementedError()
-
-    @classmethod
-    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
-        raise NotImplementedError()
-
-
-class ProjectGrouper(Grouper):
-    @classmethod
-    def label(cls, project: dict):
-        return project['name']
-
-    @classmethod
-    def url(cls, project: dict):
-        return url_for('projects.view', project_url=project['url'])
-
-    @classmethod
-    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
-        return itemgetter('project')
-
-    @classmethod
-    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
-        return lambda node: node['project']['_id']
-
-
-class UserGrouper(Grouper):
-    @classmethod
-    def label(cls, user):
-        return user['full_name']
-
-    @classmethod
-    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
-        return itemgetter('user')
-
-    @classmethod
-    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
-        return lambda node: node['user']['_id']
-
-
-class TimeLineBuilder:
-    def __init__(self, search_helper: SearchHelper, grouper: typing.Type[Grouper]):
-        self.search_helper = search_helper
-        self.grouper = grouper
-        self.continue_from = None
-
-    def build(self) -> TimelineDO:
-        raw_weeks = self.search_helper.raw_weeks_from_mongo()
-        clean_weeks = (self.create_week_group(week) for week in raw_weeks)
-
-        return TimelineDO(
-            groups=list(clean_weeks),
-            continue_from=self.continue_from.timestamp() if self.search_helper.has_more(self.continue_from) else None
-        )
-
-    def create_week_group(self, week: dict) -> GroupDO:
-        nodes = week['nodes']
-        nodes.sort(key=itemgetter('_created'), reverse=True)
-        self.update_continue_from(nodes)
-        groups = self.create_groups(nodes)
-
-        return GroupDO(
-            label=f'Week {week["_id"]["week"]}, {week["_id"]["year"]}',
-            groups=groups
-        )
-
-    def create_groups(self, nodes: typing.List[dict]) -> typing.List[GroupDO]:
-        self.sort_nodes(nodes)  # groupby assumes that the list is sorted
-        nodes_grouped = itertools.groupby(nodes, self.grouper.group_key())
-        groups = (self.clean_group(grouped_by, group) for grouped_by, group in nodes_grouped)
-        groups_sorted = sorted(groups, key=self.group_row_sorter, reverse=True)
-        return groups_sorted
-
-    def sort_nodes(self, nodes: typing.List[dict]):
-        nodes.sort(key=itemgetter('node_type'))
-        nodes.sort(key=self.grouper.sort_key())
-
-    def update_continue_from(self, sorted_nodes: typing.List[dict]):
-        if self.search_helper.sort_direction == 'desc':
-            first_created = sorted_nodes[-1]['_created']
-            candidate = self.continue_from or first_created
-            self.continue_from = min(candidate, first_created)
-        else:
-            last_created = sorted_nodes[0]['_created']
-            candidate = self.continue_from or last_created
-            self.continue_from = max(candidate, last_created)
-
-    def clean_group(self, grouped_by: typing.Any, group: typing.Iterable[dict]) -> GroupDO:
-        items = self.create_items(group)
-        return GroupDO(
-            label=self.grouper.label(grouped_by),
-            url=self.grouper.url(grouped_by),
-            items=items
-        )
-
-    def create_items(self, group) -> typing.List[dict]:
-        by_node_type = itertools.groupby(group, key=itemgetter('node_type'))
-        items = {}
-        for node_type, nodes in by_node_type:
-            items[node_type] = [self.node_prettyfy(n) for n in nodes]
-        return items
-
-    @classmethod
-    def node_prettyfy(cls, node: dict) -> dict:
-        duration_seconds = node['properties'].get('duration_seconds')
-        if duration_seconds is not None:
-            node['properties']['duration'] = pretty_duration(duration_seconds)
-        if node['node_type'] == 'post':
-            html = _get_markdowned_html(node['properties'], 'content')
-            html = shortcodes.render_commented(html, context=node['properties'])
-            node['properties']['pretty_content'] = html
-        return node
-
-    @classmethod
-    def group_row_sorter(cls, row: GroupDO) -> typing.Tuple[datetime, datetime]:
-        '''
-        Groups that contain posts are more interesting, and therefore we put them higher up.
-        :param row:
-        :return: tuple with newest post date and newest asset date
-        '''
-        def newest_created(nodes: typing.List[dict]) -> datetime:
-            if nodes:
-                return nodes[0]['_created']
-            return datetime.fromtimestamp(0, tz=bson.tz_util.utc)
-        newest_post_date = newest_created(row.items.get('post'))
-        newest_asset_date = newest_created(row.items.get('asset'))
-        return newest_post_date, newest_asset_date
-
-
-def _public_project_ids() -> typing.List[bson.ObjectId]:
-    """Returns a list of ObjectIDs of public projects.
-
-    Memoized in setup_app().
-    """
-    proj_coll = current_app.db('projects')
-    result = proj_coll.find({'is_private': False}, {'_id': 1})
-    return [p['_id'] for p in result]
-
-
-def _get_markdowned_html(document: dict, field_name: str) -> str:
-    cache_field_name = pillar.markdown.cache_field_name(field_name)
-    html = document.get(cache_field_name)
-    if html is None:
-        markdown_src = document.get(field_name) or ''
-        html = pillar.markdown.markdown(markdown_src)
-    return html
-
-
-@blueprint.route('/', methods=['GET'])
-def global_timeline():
-    continue_from_str = request.args.get('from')
-    continue_from = parse_continue_from(continue_from_str)
-    nbr_of_weeks_str = request.args.get('weeksToLoad')
-    nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
-    sort_direction = request.args.get('dir', 'desc')
-    return _global_timeline(continue_from, nbr_of_weeks, sort_direction)
-
-
-@blueprint.route('/p/<string(length=24):pid_path>', methods=['GET'])
-def project_timeline(pid_path: str):
-    continue_from_str = request.args.get('from')
-    continue_from = parse_continue_from(continue_from_str)
-    nbr_of_weeks_str = request.args.get('weeksToLoad')
-    nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
-    sort_direction = request.args.get('dir', 'desc')
-    pid = str2id(pid_path)
-    return _project_timeline(continue_from, nbr_of_weeks, sort_direction, pid)
-
-
-def parse_continue_from(from_arg) -> typing.Optional[datetime]:
-    try:
-        from_float = float(from_arg)
-    except (TypeError, ValueError):
-        return None
-    return datetime.fromtimestamp(from_float, tz=bson.tz_util.utc)
-
-
-def parse_nbr_of_weeks(weeks_to_load: str) -> int:
-    try:
-        return int(weeks_to_load)
-    except (TypeError, ValueError):
-        return 3
-
-
-def _global_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction: str):
-    """Returns an aggregated view of what has happened on the site
-    Memoized in setup_app().
-
-    :param continue_from: Python utc timestamp where to begin aggregation
-
-    :param nbr_of_weeks: Number of weeks to return
-
-    Example output:
-    {
-        groups: [{
-            label: 'Week 32',
-            groups: [{
-                label: 'Spring',
-                url: '/p/spring',
-                items:{
-                    post: [blogPostDoc, blogPostDoc],
-                    asset: [assetDoc, assetDoc]
-                },
-                groups: ...
-            }]
-        }],
-        continue_from: 123456.2  // python timestamp
-    }
-    """
-    builder = TimeLineBuilder(
-        SearchHelper(nbr_of_weeks, continue_from, _public_project_ids(), sort_direction),
-        ProjectGrouper
-    )
-    return jsonify_timeline(builder.build())
-
-
-def jsonify_timeline(timeline: TimelineDO):
-    return jsonify(
-        attr.asdict(timeline,
-                    recurse=True,
-                    filter=lambda att, value: value is not None)
-    )
-
-
-def _project_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction, pid: bson.ObjectId):
-    """Returns an aggregated view of what has happened on the site
-    Memoized in setup_app().
-
-    :param continue_from: Python utc timestamp where to begin aggregation
-
-    :param nbr_of_weeks: Number of weeks to return
-
-    Example output:
-    {
-        groups: [{
-            label: 'Week 32',
-            groups: [{
-                label: 'Tobias Johansson',
-                items:{
-                    post: [blogPostDoc, blogPostDoc],
-                    asset: [assetDoc, assetDoc]
-                },
-                groups: ...
-            }]
-        }],
-        continue_from: 123456.2  // python timestamp
-    }
-    """
-    builder = TimeLineBuilder(
-        SearchHelper(nbr_of_weeks, continue_from, [pid], sort_direction),
-        UserGrouper
-    )
-    return jsonify_timeline(builder.build())
-
-
-def setup_app(app, url_prefix):
-    global _public_project_ids
-    global _global_timeline
-    global _project_timeline
-
-    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
-    cached = app.cache.cached(timeout=3600)
-    _public_project_ids = cached(_public_project_ids)
-    memoize = app.cache.memoize(timeout=60)
-    _global_timeline = memoize(_global_timeline)
-    _project_timeline = memoize(_project_timeline)
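Note: the deleted timeline module pages by cursor rather than by page number: the builder records the oldest `_created` it has emitted, returns it as `continue_from` (a POSIX timestamp), and the client echoes it back via the `from` query argument, alongside `weeksToLoad` and `dir`. A small sketch of that cursor round-trip; the response shape comes from the docstring above, while `next_request_args` itself is an illustrative helper:

```
from datetime import datetime, timezone

def next_request_args(response: dict) -> dict:
    """Build query args for the next timeline page, or {} when exhausted."""
    cursor = response.get('continue_from')  # POSIX timestamp, absent when done
    if cursor is None:
        return {}
    # The server parses this back with datetime.fromtimestamp(..., tz=utc).
    return {'from': repr(cursor), 'weeksToLoad': '3', 'dir': 'desc'}

fake_response = {'groups': [], 'continue_from': 1514764800.0}
print(next_request_args(fake_response))
print(datetime.fromtimestamp(1514764800.0, tz=timezone.utc))  # 2018-01-01 UTC
```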
@@ -61,9 +61,6 @@ def _update_search_user_changed_role(sender, user: dict):

 def setup_app(app, api_prefix):
     from pillar.api import service
-    from . import patch
-
-    patch.setup_app(app, url_prefix=api_prefix)

     app.on_pre_GET_users += hooks.check_user_access
     app.on_post_GET_users += hooks.post_GET_user
@@ -1,159 +0,0 @@
-import functools
-import io
-import logging
-import mimetypes
-import typing
-
-from bson import ObjectId
-from eve.methods.get import getitem_internal
-import flask
-
-from pillar import current_app
-from pillar.api import blender_id
-from pillar.api.blender_cloud import home_project
-import pillar.api.file_storage
-from werkzeug.datastructures import FileStorage
-
-log = logging.getLogger(__name__)
-
-DEFAULT_AVATAR = 'assets/img/default_user_avatar.png'
-
-
-def url(user: dict) -> str:
-    """Return the avatar URL for this user.
-
-    :param user: dictionary from the MongoDB 'users' collection.
-    """
-    assert isinstance(user, dict), f'user must be dict, not {type(user)}'
-
-    avatar_id = user.get('avatar', {}).get('file')
-    if not avatar_id:
-        return _default_avatar()
-
-    # The file may not exist, in which case we get an empty string back.
-    return pillar.api.file_storage.get_file_url(avatar_id) or _default_avatar()
-
-
-@functools.lru_cache(maxsize=1)
-def _default_avatar() -> str:
-    """Return the URL path of the default avatar.
-
-    Doesn't change after the app has started, so we just cache it.
-    """
-    return flask.url_for('static_pillar', filename=DEFAULT_AVATAR)
-
-
-def _extension_for_mime(mime_type: str) -> str:
-    # Take the longest extension. I'd rather have '.jpeg' than the weird '.jpe'.
-    extensions: typing.List[str] = mimetypes.guess_all_extensions(mime_type)
-
-    try:
-        return max(extensions, key=len)
-    except ValueError:
-        # Raised when extensions is empty, e.g. when the mime type is unknown.
-        return ''
-
-
-def _get_file_link(file_id: ObjectId) -> str:
-    # Get the file document via Eve to make it update the link.
-    file_doc, _, _, status = getitem_internal('files', _id=file_id)
-    assert status == 200
-
-    return file_doc['link']
-
-
-def sync_avatar(user_id: ObjectId) -> str:
-    """Fetch the user's avatar from Blender ID and save to storage.
-
-    Errors are logged but do not raise an exception.
-
-    :return: the link to the avatar, or '' if it was not processed.
-    """
-    users_coll = current_app.db('users')
-    db_user = users_coll.find_one({'_id': user_id})
-    old_avatar_info = db_user.get('avatar', {})
-    if isinstance(old_avatar_info, ObjectId):
-        old_avatar_info = {'file': old_avatar_info}
-
-    home_proj = home_project.get_home_project(user_id)
-    if not home_proj:
-        log.error('Home project of user %s does not exist, unable to store avatar', user_id)
-        return ''
-
-    bid_userid = blender_id.get_user_blenderid(db_user)
-    if not bid_userid:
-        log.error('User %s has no Blender ID user-id, unable to fetch avatar', user_id)
-        return ''
-
-    avatar_url = blender_id.avatar_url(bid_userid)
-    bid_session = blender_id.Session()
-
-    # Avoid re-downloading the same avatar.
-    request_headers = {}
-    if avatar_url == old_avatar_info.get('last_downloaded_url') and \
-            old_avatar_info.get('last_modified'):
-        request_headers['If-Modified-Since'] = old_avatar_info.get('last_modified')
-
-    log.info('Downloading avatar for user %s from %s', user_id, avatar_url)
-    resp = bid_session.get(avatar_url, headers=request_headers, allow_redirects=True)
-    if resp.status_code == 304:
-        # File was not modified, we can keep the old file.
-        log.debug('Avatar for user %s was not modified on Blender ID, not re-downloading', user_id)
-        return _get_file_link(old_avatar_info['file'])
-
-    resp.raise_for_status()
-
-    mime_type = resp.headers['Content-Type']
-    file_extension = _extension_for_mime(mime_type)
-    if not file_extension:
-        log.error('No file extension known for mime type %s, unable to handle avatar of user %s',
-                  mime_type, user_id)
-        return ''
-
-    filename = f'avatar-{user_id}{file_extension}'
-    fake_local_file = io.BytesIO(resp.content)
-    fake_local_file.name = filename
-
-    # Act as if this file was just uploaded by the user, so we can reuse
-    # existing Pillar file-handling code.
-    log.debug("Uploading avatar for user %s to storage", user_id)
-    uploaded_file = FileStorage(
-        stream=fake_local_file,
-        filename=filename,
-        headers=resp.headers,
-        content_type=mime_type,
-        content_length=resp.headers['Content-Length'],
-    )
-
-    with pillar.auth.temporary_user(db_user):
-        upload_data = pillar.api.file_storage.upload_and_process(
-            fake_local_file,
-            uploaded_file,
-            str(home_proj['_id']),
-            # Disallow image processing, as it's a tiny file anyway and
-            # we'll just serve the original.
-            may_process_file=False,
-        )
-        file_id = ObjectId(upload_data['file_id'])
-
-    avatar_info = {
-        'file': file_id,
-        'last_downloaded_url': resp.url,
-        'last_modified': resp.headers.get('Last-Modified'),
-    }
-
-    # Update the user to store the reference to their avatar.
-    old_avatar_file_id = old_avatar_info.get('file')
-    update_result = users_coll.update_one({'_id': user_id},
-                                          {'$set': {'avatar': avatar_info}})
-    if update_result.matched_count == 1:
-        log.debug('Updated avatar for user ID %s to file %s', user_id, file_id)
-    else:
-        log.warning('Matched %d users while setting avatar for user ID %s to file %s',
-                    update_result.matched_count, user_id, file_id)
-
-    if old_avatar_file_id:
-        current_app.delete_internal('files', _id=old_avatar_file_id)
-
-    return _get_file_link(file_id)
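Note: the deleted `sync_avatar()` avoids re-downloading an unchanged avatar by replaying the stored `Last-Modified` value as an `If-Modified-Since` request header and treating HTTP 304 as "keep the old file". A standalone sketch of that conditional-GET pattern using `requests`; the URL and function name are illustrative:

```
import requests

def fetch_if_modified(url: str, last_modified: str = None) -> bytes:
    """Return new body bytes, or b'' when the server says 304 Not Modified."""
    headers = {}
    if last_modified:
        headers['If-Modified-Since'] = last_modified
    resp = requests.get(url, headers=headers, allow_redirects=True)
    if resp.status_code == 304:
        return b''  # the cached copy is still valid
    resp.raise_for_status()
    return resp.content
```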
@@ -1,12 +1,13 @@
 import copy
 import json

+import bson
 from eve.utils import parse_request
 from werkzeug import exceptions as wz_exceptions

 from pillar import current_app
 from pillar.api.users.routes import log
-import pillar.api.users.avatar
+from pillar.api.utils.authorization import user_has_role
 import pillar.auth

 USER_EDITABLE_FIELDS = {'full_name', 'username', 'email', 'settings'}

@@ -125,7 +126,7 @@ def check_put_access(request, lookup):
         raise wz_exceptions.Forbidden()


-def after_fetching_user(user: dict) -> None:
+def after_fetching_user(user):
     # Deny access to auth block; authentication stuff is managed by
     # custom end-points.
     user.pop('auth', None)

@@ -141,7 +142,7 @@ def after_fetching_user(user: dict) -> None:
         return

     # Remove all fields except public ones.
-    public_fields = {'full_name', 'username', 'email', 'extension_props_public', 'badges'}
+    public_fields = {'full_name', 'username', 'email', 'extension_props_public'}
     for field in list(user.keys()):
         if field not in public_fields:
             del user[field]
@@ -1,45 +0,0 @@
-"""User patching support."""
-
-import logging
-
-import bson
-from flask import Blueprint
-import werkzeug.exceptions as wz_exceptions
-
-from pillar import current_app
-from pillar.auth import current_user
-from pillar.api.utils import authorization, jsonify, remove_private_keys
-from pillar.api import patch_handler
-
-log = logging.getLogger(__name__)
-patch_api_blueprint = Blueprint('users.patch', __name__)
-
-
-class UserPatchHandler(patch_handler.AbstractPatchHandler):
-    item_name = 'user'
-
-    @authorization.require_login()
-    def patch_set_username(self, user_id: bson.ObjectId, patch: dict):
-        """Updates a user's username."""
-        if user_id != current_user.user_id:
-            log.info('User %s tried to change username of user %s',
-                     current_user.user_id, user_id)
-            raise wz_exceptions.Forbidden('You may only change your own username')
-
-        new_username = patch['username']
-        log.info('User %s uses PATCH to set username to %r', current_user.user_id, new_username)
-
-        users_coll = current_app.db('users')
-        db_user = users_coll.find_one({'_id': user_id})
-        db_user['username'] = new_username
-
-        # Save via Eve to check the schema and trigger update hooks.
-        response, _, _, status = current_app.put_internal(
-            'users', remove_private_keys(db_user), _id=user_id)
-
-        return jsonify(response), status
-
-
-def setup_app(app, url_prefix):
-    UserPatchHandler(patch_api_blueprint)
-    app.register_api_blueprint(patch_api_blueprint, url_prefix=url_prefix)
@@ -1,11 +1,9 @@
 import logging

 from eve.methods.get import get
-from flask import Blueprint, request
-import werkzeug.exceptions as wz_exceptions
+from flask import Blueprint

-from pillar import current_app
-from pillar.api import utils
+from pillar.api.utils import jsonify
 from pillar.api.utils.authorization import require_login
 from pillar.auth import current_user

@@ -17,128 +15,7 @@ blueprint_api = Blueprint('users_api', __name__)
 @require_login()
 def my_info():
     eve_resp, _, _, status, _ = get('users', {'_id': current_user.user_id})
-    resp = utils.jsonify(eve_resp['_items'][0], status=status)
+    resp = jsonify(eve_resp['_items'][0], status=status)
     return resp


-@blueprint_api.route('/video/<video_id>/progress')
-@require_login()
-def get_video_progress(video_id: str):
-    """Return video progress information.
-
-    Either a `204 No Content` is returned (no information stored),
-    or a `200 Ok` with JSON from Eve's 'users' schema, from the key
-    video.view_progress.<video_id>.
-    """
-
-    # Validation of the video ID; raises a BadRequest when it's not an ObjectID.
-    # This isn't strictly necessary, but it makes this function behave symmetrically
-    # to the set_video_progress() function.
-    utils.str2id(video_id)
-
-    users_coll = current_app.db('users')
-    user_doc = users_coll.find_one(current_user.user_id, projection={'nodes.view_progress': True})
-    try:
-        progress = user_doc['nodes']['view_progress'][video_id]
-    except KeyError:
-        return '', 204
-    if not progress:
-        return '', 204
-
-    return utils.jsonify(progress)
-
-
-@blueprint_api.route('/video/<video_id>/progress', methods=['POST'])
-@require_login()
-def set_video_progress(video_id: str):
-    """Save progress information about a certain video.
-
-    Expected parameters:
-    - progress_in_sec: float number of seconds
-    - progress_in_perc: integer percentage of video watched (interval [0-100])
-    """
-    my_log = log.getChild('set_video_progress')
-    my_log.debug('Setting video progress for user %r video %r', current_user.user_id, video_id)
-
-    # Constructing this response requires an active app, and thus can't be done on module load.
-    no_video_response = utils.jsonify({'_message': 'No such video'}, status=404)
-
-    try:
-        progress_in_sec = float(request.form['progress_in_sec'])
-        progress_in_perc = int(request.form['progress_in_perc'])
-    except KeyError as ex:
-        my_log.debug('Missing POST field in request: %s', ex)
-        raise wz_exceptions.BadRequest(f'missing a form field')
-    except ValueError as ex:
-        my_log.debug('Invalid value for POST field in request: %s', ex)
-        raise wz_exceptions.BadRequest(f'Invalid value for field: {ex}')
-
-    users_coll = current_app.db('users')
-    nodes_coll = current_app.db('nodes')
-
-    # First check whether this is actually an existing video
-    video_oid = utils.str2id(video_id)
-    video_doc = nodes_coll.find_one(video_oid, projection={
-        'node_type': True,
-        'properties.content_type': True,
-        'properties.file': True,
-    })
-    if not video_doc:
-        my_log.debug('Node %r not found, unable to set progress for user %r',
-                     video_oid, current_user.user_id)
-        return no_video_response
-
-    try:
-        is_video = (video_doc['node_type'] == 'asset'
-                    and video_doc['properties']['content_type'] == 'video')
-    except KeyError:
-        is_video = False
-
-    if not is_video:
-        my_log.info('Node %r is not a video, unable to set progress for user %r',
-                    video_oid, current_user.user_id)
-        # There is no video found at this URL, so act as if it doesn't even exist.
-        return no_video_response
-
-    # Compute the progress
-    percent = min(100, max(0, progress_in_perc))
-    progress = {
-        'progress_in_sec': progress_in_sec,
-        'progress_in_percent': percent,
-        'last_watched': utils.utcnow(),
-    }
-
-    # After watching a certain percentage of the video, we consider it 'done'
-    #
-    #                    Total     Credit start  Total  Credit  Percent
-    #                    HH:MM:SS  HH:MM:SS      sec    sec     of duration
-    # Sintel             00:14:48  00:12:24      888    744     83.78%
-    # Tears of Steel     00:12:14  00:09:49      734    589     80.25%
-    # Cosmos Laundromat  00:12:10  00:10:05      730    605     82.88%
-    # Agent 327          00:03:51  00:03:26      231    206     89.18%
-    # Caminandes 3       00:02:30  00:02:18      150    138     92.00%
-    # Glass Half         00:03:13  00:02:52      193    172     89.12%
-    # Big Buck Bunny     00:09:56  00:08:11      596    491     82.38%
-    # Elephant's Dream   00:10:54  00:09:25      654    565     86.39%
-    #
-    #                                            Median         85.09%
-    #                                            Average        85.75%
-    #
-    # For training videos, marking as done at 85% of the video may be a bit
-    # early, since those probably won't have (long) credits. This is why we
-    # stick to 90% here.
-    if percent >= 90:
-        progress['done'] = True
-
-    # Setting each property individually prevents us from overwriting any
-    # existing {done: true} fields.
-    updates = {f'nodes.view_progress.{video_id}.{k}': v
-               for k, v in progress.items()}
-    result = users_coll.update_one({'_id': current_user.user_id},
-                                   {'$set': updates})
-
-    if result.matched_count == 0:
-        my_log.error('Current user %r could not be updated', current_user.user_id)
-        raise wz_exceptions.InternalServerError('Unable to find logged-in user')
-
-    return '', 204
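Note: the removed `set_video_progress()` clamps the reported percentage into [0, 100] and only flips the `done` flag once 90% is watched, so end credits don't keep a video "unfinished" forever. The clamping and threshold logic in isolation (helper name is illustrative):

```
def progress_update(progress_in_sec: float, progress_in_perc: int) -> dict:
    """Clamp the watched percentage and mark the video done at >= 90%."""
    percent = min(100, max(0, progress_in_perc))
    progress = {
        'progress_in_sec': progress_in_sec,
        'progress_in_percent': percent,
    }
    if percent >= 90:
        progress['done'] = True
    return progress

assert progress_update(500.0, 104)['progress_in_percent'] == 100
assert progress_update(700.0, 91).get('done') is True
assert 'done' not in progress_update(100.0, 20)
```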
@@ -8,7 +8,6 @@ import logging
 import random
 import typing
 import urllib.request, urllib.parse, urllib.error
-import warnings

 import bson.objectid
 import bson.tz_util

@@ -45,16 +44,10 @@ def remove_private_keys(document):
     """Removes any key that starts with an underscore, returns result as new
     dictionary.
     """
-    def do_remove(doc):
-        for key in list(doc.keys()):
-            if key.startswith('_'):
-                del doc[key]
-            elif isinstance(doc[key], dict):
-                doc[key] = do_remove(doc[key])
-        return doc
-
     doc_copy = copy.deepcopy(document)
-    do_remove(doc_copy)
+    for key in list(doc_copy.keys()):
+        if key.startswith('_'):
+            del doc_copy[key]

     try:
         del doc_copy['allowed_methods']
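Note: the hunk above swaps recursive private-key stripping for a single top-level pass: the master side walks into nested dicts, while the wip side only removes underscore keys from the outermost document. A minimal sketch contrasting the two behaviours (simplified, without the deepcopy):

```
def strip_top_level(doc: dict) -> dict:
    """Wip-branch behaviour: only the outermost underscore keys go."""
    return {k: v for k, v in doc.items() if not k.startswith('_')}

def strip_recursive(doc: dict) -> dict:
    """Master behaviour: underscore keys are removed at every depth."""
    return {k: (strip_recursive(v) if isinstance(v, dict) else v)
            for k, v in doc.items() if not k.startswith('_')}

doc = {'_id': 1, 'props': {'_etag': 'x', 'name': 'n'}}
assert strip_top_level(doc) == {'props': {'_etag': 'x', 'name': 'n'}}
assert strip_recursive(doc) == {'props': {'name': 'n'}}
```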
@@ -64,39 +57,6 @@ def remove_private_keys(document):
     return doc_copy


-def pretty_duration(seconds: typing.Union[None, int, float]):
-    if seconds is None:
-        return ''
-    seconds = round(seconds)
-    hours, seconds = divmod(seconds, 3600)
-    minutes, seconds = divmod(seconds, 60)
-    if hours > 0:
-        return f'{hours:02}:{minutes:02}:{seconds:02}'
-    else:
-        return f'{minutes:02}:{seconds:02}'
-
-
-def pretty_duration_fractional(seconds: typing.Union[None, int, float]):
-    if seconds is None:
-        return ''
-
-    # Remove fraction of seconds from the seconds so that the rest is done as integers.
-    seconds, fracs = divmod(seconds, 1)
-    hours, seconds = divmod(int(seconds), 3600)
-    minutes, seconds = divmod(seconds, 60)
-    msec = int(round(fracs * 1000))
-
-    if msec == 0:
-        msec_str = ''
-    else:
-        msec_str = f'.{msec:03}'
-
-    if hours > 0:
-        return f'{hours:02}:{minutes:02}:{seconds:02}{msec_str}'
-    else:
-        return f'{minutes:02}:{seconds:02}{msec_str}'
-
-
 class PillarJSONEncoder(json.JSONEncoder):
     """JSON encoder with support for Pillar resources."""
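Note: the removed `pretty_duration()` formats a duration in seconds as `MM:SS`, switching to `HH:MM:SS` once it reaches an hour, and `pretty_duration_fractional()` additionally keeps milliseconds when they are non-zero. A few doctest-style examples of the removed behaviour, worked through from the code above:

```
>>> pretty_duration(83)
'01:23'
>>> pretty_duration(3600 + 83)
'01:01:23'
>>> pretty_duration(None)
''
>>> pretty_duration_fractional(83.042)
'01:23.042'
>>> pretty_duration_fractional(83.0)
'01:23'
```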
@@ -104,9 +64,6 @@ class PillarJSONEncoder(json.JSONEncoder):
         if isinstance(obj, datetime.datetime):
             return obj.strftime(RFC1123_DATE_FORMAT)

-        if isinstance(obj, datetime.timedelta):
-            return pretty_duration(obj.total_seconds())
-
         if isinstance(obj, bson.ObjectId):
             return str(obj)

@@ -187,16 +144,6 @@ def str2id(document_id: str) -> bson.ObjectId:


 def gravatar(email: str, size=64) -> typing.Optional[str]:
-    """Deprecated: return the Gravatar URL.
-
-    .. deprecated::
-        Use of Gravatar is deprecated, in favour of our self-hosted avatars.
-        See pillar.api.users.avatar.url(user).
-    """
-    warnings.warn('pillar.api.utils.gravatar() is deprecated, '
-                  'use pillar.api.users.avatar.url() instead',
-                  category=DeprecationWarning)
-
     if email is None:
         return None

@@ -234,8 +181,7 @@ def doc_diff(doc1, doc2, *, falsey_is_equal=True, superkey: str = None):
     function won't report differences between DoesNotExist, False, '', and 0.
     """

-    def is_private(key):
-        return str(key).startswith('_')
+    private_keys = {'_id', '_etag', '_deleted', '_updated', '_created'}

     def combine_key(some_key):
         """Combine this key with the superkey.

@@ -256,7 +202,7 @@ def doc_diff(doc1, doc2, *, falsey_is_equal=True, superkey: str = None):

     if isinstance(doc1, dict) and isinstance(doc2, dict):
         for key in set(doc1.keys()).union(set(doc2.keys())):
-            if is_private(key):
+            if key in private_keys:
                 continue

             val1 = doc1.get(key, DoesNotExist)

@@ -299,10 +245,4 @@ def random_etag() -> str:


 def utcnow() -> datetime.datetime:
-    """Construct timezone-aware 'now' in UTC with millisecond precision."""
-    now = datetime.datetime.now(tz=bson.tz_util.utc)
-
-    # MongoDB stores in millisecond precision, so truncate the microseconds.
-    # This way the returned datetime can be round-tripped via MongoDB and stay the same.
-    trunc_now = now.replace(microsecond=now.microsecond - (now.microsecond % 1000))
-    return trunc_now
+    return datetime.datetime.now(tz=bson.tz_util.utc)
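Note: on the master side (the `-` lines), `utcnow()` truncates to millisecond precision so that a datetime survives a round-trip through MongoDB (which stores milliseconds) unchanged; the wip side returns microsecond precision. The truncation step on its own:

```
import datetime

def truncate_to_ms(now: datetime.datetime) -> datetime.datetime:
    """Drop sub-millisecond precision, matching MongoDB's storage."""
    return now.replace(microsecond=now.microsecond - (now.microsecond % 1000))

ts = datetime.datetime(2018, 1, 1, 12, 0, 0, 123456, tzinfo=datetime.timezone.utc)
assert truncate_to_ms(ts).microsecond == 123000
```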
@@ -13,7 +13,7 @@ import logging
 import typing

 import bson
-from flask import g, current_app, session
+from flask import g, current_app
 from flask import request
 from werkzeug import exceptions as wz_exceptions

@@ -103,7 +103,7 @@ def find_user_in_db(user_info: dict, provider='blender-id') -> dict:
     return db_user


-def validate_token(*, force=False) -> bool:
+def validate_token(*, force=False):
     """Validate the token provided in the request and populate the current_user
     flask.g object, so that permissions and access to a resource can be defined
     from it.

@@ -115,7 +115,7 @@ def validate_token(*, force=False) -> bool:
     :returns: True iff the user is logged in with a valid Blender ID token.
     """

-    import pillar.auth
+    from pillar.auth import AnonymousUser

     # Trust a pre-existing g.current_user
     if not force:

@@ -133,22 +133,16 @@ def validate_token(*, force=False) -> bool:
         oauth_subclient = ''
     else:
         # Check the session, the user might be logged in through Flask-Login.
-        # The user has a logged-in session; trust only if this request passes a CSRF check.
-        # FIXME(Sybren): we should stop saving the token as 'user_id' in the session.
-        token = session.get('user_id')
-        if token:
-            log.debug('skipping token check because current user already has a session')
-            current_app.csrf.protect()
-        else:
-            token = pillar.auth.get_blender_id_oauth_token()
+        from pillar import auth
+
+        token = auth.get_blender_id_oauth_token()
         oauth_subclient = None

     if not token:
         # If no authorization headers are provided, we are getting a request
         # from a non logged in user. Proceed accordingly.
         log.debug('No authentication headers, so not logged in.')
-        g.current_user = pillar.auth.AnonymousUser()
+        g.current_user = AnonymousUser()
         return False

     return validate_this_token(token, oauth_subclient) is not None

@@ -169,6 +163,8 @@ def validate_this_token(token, oauth_subclient=None):
     # Check the users to see if there is one with this Blender ID token.
     db_token = find_token(token, oauth_subclient)
     if not db_token:
+        log.debug('Token %r not found in our local database.', token)
+
         # If no valid token is found in our local database, we issue a new
         # request to the Blender ID server to verify the validity of the token
         # passed via the HTTP header. We will get basic user info if the user

@@ -187,7 +183,7 @@ def validate_this_token(token, oauth_subclient=None):
         return None

     g.current_user = UserClass.construct(token, db_user)
-    user_authenticated.send(g.current_user)
+    user_authenticated.send(None)

     return db_user

@@ -198,7 +194,7 @@ def remove_token(token: str):
     tokens_coll = current_app.db('tokens')
     token_hashed = hash_auth_token(token)

-    # TODO: remove matching on hashed tokens once all hashed tokens have expired.
+    # TODO: remove matching on unhashed tokens once all tokens have been hashed.
     lookup = {'$or': [{'token': token}, {'token_hashed': token_hashed}]}
     del_res = tokens_coll.delete_many(lookup)
     log.debug('Removed token %r, matched %d documents', token, del_res.deleted_count)

@@ -210,7 +206,7 @@ def find_token(token, is_subclient_token=False, **extra_filters):
     tokens_coll = current_app.db('tokens')
     token_hashed = hash_auth_token(token)

-    # TODO: remove matching on hashed tokens once all hashed tokens have expired.
+    # TODO: remove matching on unhashed tokens once all tokens have been hashed.
     lookup = {'$or': [{'token': token}, {'token_hashed': token_hashed}],
               'is_subclient_token': True if is_subclient_token else {'$in': [False, None]},
               'expire_time': {"$gt": utcnow()}}

@@ -233,14 +229,8 @@ def hash_auth_token(token: str) -> str:
     return base64.b64encode(digest).decode('ascii')


-def store_token(user_id,
-                token: str,
-                token_expiry,
-                oauth_subclient_id=False,
-                *,
-                org_roles: typing.Set[str] = frozenset(),
-                oauth_scopes: typing.Optional[typing.List[str]] = None,
-                ):
+def store_token(user_id, token: str, token_expiry, oauth_subclient_id=False,
+                org_roles: typing.Set[str] = frozenset()):
     """Stores an authentication token.

     :returns: the token document from MongoDB

@@ -250,15 +240,13 @@ def store_token(user_id,
     token_data = {
         'user': user_id,
-        'token': token,
+        'token_hashed': hash_auth_token(token),
         'expire_time': token_expiry,
     }
     if oauth_subclient_id:
         token_data['is_subclient_token'] = True
     if org_roles:
         token_data['org_roles'] = sorted(org_roles)
-    if oauth_scopes:
-        token_data['oauth_scopes'] = oauth_scopes

     r, _, _, status = current_app.post_internal('tokens', token_data)
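Note: the `store_token()` hunk is the crux of the token-storage change: the master side stores the plain `token` (hashing only at lookup time, until old hashed tokens expire), while the wip side stores `token_hashed` via `hash_auth_token()`. Judging by the `base64.b64encode(digest)` line in the hunk context, `hash_auth_token()` is a keyed digest that is then base64-encoded; a sketch of that shape, where the HMAC construction, key source, and algorithm are assumptions rather than facts from this diff:

```
import base64
import hashlib
import hmac

def hash_auth_token(token: str, hmac_key: bytes) -> str:
    """HMAC the token and base64-encode the digest (key from server config)."""
    digest = hmac.new(hmac_key, msg=token.encode('utf8'),
                      digestmod=hashlib.sha256).digest()
    return base64.b64encode(digest).decode('ascii')

print(hash_auth_token('secret-oauth-token', b'server-side-key'))
```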
@@ -375,10 +363,6 @@ def current_user():
 def setup_app(app):
     @app.before_request
     def validate_token_at_each_request():
-        # Skip token validation if this is a static asset
-        # to avoid spamming Blender ID for no good reason
-        if request.path.startswith('/static/'):
-            return
         validate_token()
@ -1,6 +1,5 @@
|
|||||||
import logging
|
import logging
|
||||||
import functools
|
import functools
|
||||||
import typing
|
|
||||||
|
|
||||||
from bson import ObjectId
|
from bson import ObjectId
|
||||||
from flask import g
|
from flask import g
|
||||||
@ -13,9 +12,8 @@ CHECK_PERMISSIONS_IMPLEMENTED_FOR = {'projects', 'nodes', 'flamenco_jobs'}
|
|||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def check_permissions(collection_name: str, resource: dict, method: str,
|
def check_permissions(collection_name, resource, method, append_allowed_methods=False,
|
||||||
append_allowed_methods=False,
|
check_node_type=None):
|
||||||
check_node_type: typing.Optional[str] = None):
|
|
||||||
"""Check user permissions to access a node. We look up node permissions from
|
"""Check user permissions to access a node. We look up node permissions from
|
||||||
world to groups to users and match them with the computed user permissions.
|
world to groups to users and match them with the computed user permissions.
|
||||||
If there is not match, we raise 403.
|
If there is not match, we raise 403.
|
||||||
@ -95,9 +93,8 @@ def compute_allowed_methods(collection_name, resource, check_node_type=None):
|
|||||||
return allowed_methods
|
return allowed_methods
|
||||||
|
|
||||||
|
|
||||||
def has_permissions(collection_name: str, resource: dict, method: str,
|
def has_permissions(collection_name, resource, method, append_allowed_methods=False,
|
||||||
append_allowed_methods=False,
|
check_node_type=None):
|
||||||
check_node_type: typing.Optional[str] = None):
|
|
||||||
"""Check user permissions to access a node. We look up node permissions from
|
"""Check user permissions to access a node. We look up node permissions from
|
||||||
world to groups to users and match them with the computed user permissions.
|
world to groups to users and match them with the computed user permissions.
|
||||||
|
|
||||||
@ -331,9 +328,8 @@ def require_login(*, require_roles=set(),
|
|||||||
|
|
||||||
def render_error() -> Response:
|
def render_error() -> Response:
|
||||||
if error_view is None:
|
if error_view is None:
|
||||||
resp = Forbidden().get_response()
|
abort(403)
|
||||||
else:
|
resp: Response = error_view()
|
||||||
resp = error_view()
|
|
||||||
resp.status_code = 403
|
resp.status_code = 403
|
||||||
return resp
|
return resp
|
||||||
|
|
||||||
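Both variants of `check_permissions()` raise a 403 when world/group/user permissions do not match the requested method. A hypothetical call site, with the module path assumed from Pillar's layout:

```python
from pillar.api.utils import authorization


def patch_node(node: dict):
    # Raises a 403 error when the current user may not PATCH this node;
    # on success it simply returns and the caller proceeds.
    authorization.check_permissions('nodes', node, 'PATCH')
    # ... apply the patch ...
```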
@@ -9,8 +9,12 @@ string = functools.partial(attr.ib, validator=attr.validators.instance_of(str))


 def log(name):
-    """Returns a logger
+    """Returns a logger attr.ib

     :param name: name to pass to logging.getLogger()
+    :rtype: attr.ib
     """
-    return logging.getLogger(name)
+    return attr.ib(default=logging.getLogger(name),
+                   repr=False,
+                   hash=False,
+                   cmp=False)
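The right-hand version of `log()` returns an `attr.ib` rather than a logger, so `attr.s`-decorated classes get a logger attribute that stays out of `repr()`, hashing, and comparisons. A usage sketch; note that `cmp=False` in the diff is the older attrs spelling of today's `eq=False, order=False`:

```python
import logging

import attr


@attr.s
class Task:
    name = attr.ib(validator=attr.validators.instance_of(str))
    # Equivalent of the log() helper above, written out inline:
    _log = attr.ib(default=logging.getLogger(__name__),
                   repr=False, eq=False, order=False)

    def run(self):
        self._log.info('running task %s', self.name)
```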
@@ -1,24 +1,18 @@
 """Authentication code common to the web and api modules."""

 import collections
-import contextlib
-import copy
-import functools
 import logging
 import typing

 import blinker
-from bson import ObjectId
+import bson
 from flask import session, g
 import flask_login
 from werkzeug.local import LocalProxy

 from pillar import current_app

-# The sender is the user that was just authenticated.
 user_authenticated = blinker.Signal('Sent whenever a user was authenticated')
-user_logged_in = blinker.Signal('Sent whenever a user logged in on the web')

 log = logging.getLogger(__name__)

 # Mapping from user role to capabilities obtained by users with that role.
@@ -34,21 +28,16 @@ class UserClass(flask_login.UserMixin):
     def __init__(self, token: typing.Optional[str]):
         # We store the Token instead of ID
         self.id = token
-        self.auth_token = token
         self.username: str = None
         self.full_name: str = None
-        self.user_id: ObjectId = None
+        self.user_id: bson.ObjectId = None
         self.objectid: str = None
+        self.gravatar: str = None
         self.email: str = None
         self.roles: typing.List[str] = []
         self.groups: typing.List[str] = []  # NOTE: these are stringified object IDs.
-        self.group_ids: typing.List[ObjectId] = []
+        self.group_ids: typing.List[bson.ObjectId] = []
         self.capabilities: typing.Set[str] = set()
-        self.nodes: dict = {}  # see the 'nodes' key in eve_settings.py::user_schema.
-        self.badges_html: str = ''
-
-        # Stored when constructing a user from the database
-        self._db_user = {}

         # Lazily evaluated
         self._has_organizations: typing.Optional[bool] = None
@@ -57,24 +46,20 @@ class UserClass(flask_login.UserMixin):
     def construct(cls, token: str, db_user: dict) -> 'UserClass':
         """Constructs a new UserClass instance from a Mongo user document."""

+        from ..api import utils
+
         user = cls(token)

-        user._db_user = copy.deepcopy(db_user)
         user.user_id = db_user.get('_id')
         user.roles = db_user.get('roles') or []
         user.group_ids = db_user.get('groups') or []
         user.email = db_user.get('email') or ''
         user.username = db_user.get('username') or ''
         user.full_name = db_user.get('full_name') or ''
-        user.badges_html = db_user.get('badges', {}).get('html') or ''
-
-        # Be a little more specific than just db_user['nodes'] or db_user['avatar']
-        user.nodes = {
-            'view_progress': db_user.get('nodes', {}).get('view_progress', {}),
-        }

         # Derived properties
         user.objectid = str(user.user_id or '')
+        user.gravatar = utils.gravatar(user.email)
         user.groups = [str(g) for g in user.group_ids]
         user.collect_capabilities()

@@ -167,31 +152,6 @@ class UserClass(flask_login.UserMixin):

         return bool(self._has_organizations)

-    def frontend_info(self) -> dict:
-        """Return a dictionary of user info for injecting into the page."""
-
-        return {
-            'user_id': str(self.user_id),
-            'username': self.username,
-            'full_name': self.full_name,
-            'avatar_url': self.avatar_url,
-            'email': self.email,
-            'capabilities': list(self.capabilities),
-            'badges_html': self.badges_html,
-            'is_authenticated': self.is_authenticated,
-        }
-
-    @property
-    @functools.lru_cache(maxsize=1)
-    def avatar_url(self) -> str:
-        """Return the Avatar image URL for this user.
-
-        :return: The avatar URL (the default one if the user has no avatar).
-        """
-
-        import pillar.api.users.avatar
-        return pillar.api.users.avatar.url(self._db_user)
-

 class AnonymousUser(flask_login.AnonymousUserMixin, UserClass):
     def __init__(self):
@@ -250,15 +210,9 @@ def login_user(oauth_token: str, *, load_from_db=False):
         user = _load_user(oauth_token)
     else:
         user = UserClass(oauth_token)
-    login_user_object(user)
-
-
-def login_user_object(user: UserClass):
-    """Log in the given user."""
     flask_login.login_user(user, remember=True)
     g.current_user = user
-    user_authenticated.send(user)
-    user_logged_in.send(user)
+    user_authenticated.send(None)


 def logout_user():
@@ -275,25 +229,6 @@ def logout_user():

     g.current_user = AnonymousUser()


-@contextlib.contextmanager
-def temporary_user(db_user: dict):
-    """Temporarily sets the given user as 'current user'.
-
-    Does not trigger login signals, as this is not a real login action.
-    """
-    try:
-        actual_current_user = g.current_user
-    except AttributeError:
-        actual_current_user = AnonymousUser()
-
-    temp_user = UserClass.construct('', db_user)
-    try:
-        g.current_user = temp_user
-        yield
-    finally:
-        g.current_user = actual_current_user
-
 def get_blender_id_oauth_token() -> str:
     """Returns the Blender ID auth token, or an empty string if there is none."""

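The left-hand side's `temporary_user()` swaps `g.current_user` for the duration of a block and restores the previous user afterwards, without firing login signals. A hypothetical use; the module path and the wrapped helper are assumptions for illustration:

```python
from pillar import current_app
from pillar.auth import temporary_user


def render_preview_as(user_email: str):
    users_coll = current_app.db('users')
    db_user = users_coll.find_one({'email': user_email})

    with temporary_user(db_user):
        # Inside the block, permission checks see the given user; the
        # previous user (or AnonymousUser) is restored on exit, even on error.
        return render_project_preview()  # hypothetical helper
```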
@@ -1,48 +0,0 @@
-"""Support for adding CORS headers to responses."""
-
-import functools
-
-import flask
-import werkzeug.wrappers as wz_wrappers
-import werkzeug.exceptions as wz_exceptions
-
-
-def allow(*, allow_credentials=False):
-    """Flask endpoint decorator, adds CORS headers to the response.
-
-    If the request has a non-empty 'Origin' header, the response header
-    'Access-Control-Allow-Origin' is set to the value of that request header,
-    and some other CORS headers are set.
-    """
-    def decorator(wrapped):
-        @functools.wraps(wrapped)
-        def wrapper(*args, **kwargs):
-            request_origin = flask.request.headers.get('Origin')
-            if not request_origin:
-                # No CORS headers requested, so don't bother touching the response.
-                return wrapped(*args, **kwargs)
-
-            try:
-                response = wrapped(*args, **kwargs)
-            except wz_exceptions.HTTPException as ex:
-                response = ex.get_response()
-            else:
-                if isinstance(response, tuple):
-                    response = flask.make_response(*response)
-                elif isinstance(response, str):
-                    response = flask.make_response(response)
-                elif isinstance(response, wz_wrappers.Response):
-                    pass
-                else:
-                    raise TypeError(f'unknown response type {type(response)}')
-
-            assert isinstance(response, wz_wrappers.Response)
-
-            response.headers.set('Access-Control-Allow-Origin', request_origin)
-            response.headers.set('Access-Control-Allow-Headers', 'x-requested-with')
-            if allow_credentials:
-                response.headers.set('Access-Control-Allow-Credentials', 'true')
-
-            return response
-        return wrapper
-    return decorator
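Usage of the removed `allow()` decorator is a one-liner on a Flask view. A sketch with a hypothetical endpoint; the import path is an assumption:

```python
import flask

from pillar.auth import cors

blueprint = flask.Blueprint('example', __name__)


@blueprint.route('/public-info')
@cors.allow()  # reflects the request's Origin header into the response
def public_info():
    return flask.jsonify(status='ok')
```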
@@ -1,9 +1,8 @@
 import abc
+import attr
 import json
 import logging
-import typing

-import attr
 from rauth import OAuth2Service
 from flask import current_app, url_for, request, redirect, session, Response

@@ -16,8 +15,6 @@ class OAuthUserResponse:

     id = attr.ib(validator=attr.validators.instance_of(str))
     email = attr.ib(validator=attr.validators.instance_of(str))
-    access_token = attr.ib(validator=attr.validators.instance_of(str))
-    scopes: typing.List[str] = attr.ib(validator=attr.validators.instance_of(list))


 class OAuthError(Exception):
@@ -130,10 +127,8 @@ class OAuthSignIn(metaclass=abc.ABCMeta):

 class BlenderIdSignIn(OAuthSignIn):
     provider_name = 'blender-id'
-    scopes = ['email', 'badge']

     def __init__(self):
-        from urllib.parse import urljoin
         super().__init__()

         base_url = current_app.config['BLENDER_ID_ENDPOINT']
@@ -142,14 +137,14 @@ class BlenderIdSignIn(OAuthSignIn):
             name='blender-id',
             client_id=self.consumer_id,
             client_secret=self.consumer_secret,
-            authorize_url=urljoin(base_url, 'oauth/authorize'),
-            access_token_url=urljoin(base_url, 'oauth/token'),
-            base_url=urljoin(base_url, 'api/'),
+            authorize_url='%s/oauth/authorize' % base_url,
+            access_token_url='%s/oauth/token' % base_url,
+            base_url='%s/api/' % base_url
         )

     def authorize(self):
         return redirect(self.service.get_authorize_url(
-            scope=' '.join(self.scopes),
+            scope='email',
             response_type='code',
             redirect_uri=self.get_callback_url())
         )
@@ -163,11 +158,7 @@ class BlenderIdSignIn(OAuthSignIn):

         session['blender_id_oauth_token'] = access_token
         me = oauth_session.get('user').json()
-        # Blender ID doesn't tell us which scopes were granted by the user, so
-        # for now assume we got all the scopes we requested.
-        # (see https://github.com/jazzband/django-oauth-toolkit/issues/644)
-        return OAuthUserResponse(str(me['id']), me['email'], access_token, self.scopes)
+        return OAuthUserResponse(str(me['id']), me['email'])


 class FacebookSignIn(OAuthSignIn):
@@ -197,7 +188,7 @@ class FacebookSignIn(OAuthSignIn):
         me = oauth_session.get('me?fields=id,email').json()
         # TODO handle case when user chooses not to disclose en email
         # see https://developers.facebook.com/docs/graph-api/reference/user/
-        return OAuthUserResponse(me['id'], me.get('email'), '', [])
+        return OAuthUserResponse(me['id'], me.get('email'))


 class GoogleSignIn(OAuthSignIn):
@@ -225,4 +216,4 @@ class GoogleSignIn(OAuthSignIn):
         oauth_session = self.make_oauth_session()

         me = oauth_session.get('userinfo').json()
-        return OAuthUserResponse(str(me['id']), me['email'], '', [])
+        return OAuthUserResponse(str(me['id']), me['email'])
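Adding a provider follows the same shape as the classes in this diff: a `provider_name`, an `OAuth2Service`, and a callback that returns `OAuthUserResponse`. A hypothetical provider against the left-hand (four-argument) response signature; the endpoint URLs are placeholders, and `OAuthSignIn` is the base class defined in this module:

```python
from rauth import OAuth2Service


class ExampleSignIn(OAuthSignIn):  # OAuthSignIn as defined in this module
    provider_name = 'example'
    scopes = ['email']

    def __init__(self):
        super().__init__()
        # consumer_id/consumer_secret come from the base class, as with
        # BlenderIdSignIn above.
        self.service = OAuth2Service(
            name=self.provider_name,
            client_id=self.consumer_id,
            client_secret=self.consumer_secret,
            authorize_url='https://auth.example.com/oauth/authorize',
            access_token_url='https://auth.example.com/oauth/token',
            base_url='https://auth.example.com/api/',
        )
```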
@@ -1,266 +0,0 @@
-import collections
-import datetime
-import logging
-import typing
-from urllib.parse import urljoin
-
-import bson
-import requests
-
-from pillar import current_app, auth
-from pillar.api.utils import utcnow
-
-SyncUser = collections.namedtuple('SyncUser', 'user_id token bid_user_id')
-BadgeHTML = collections.namedtuple('BadgeHTML', 'html expires')
-log = logging.getLogger(__name__)
-
-
-class StopRefreshing(Exception):
-    """Indicates that Blender ID is having problems.
-
-    Further badge refreshes should be put on hold to avoid bludgeoning
-    a suffering Blender ID.
-    """
-
-
-def find_user_to_sync(user_id: bson.ObjectId) -> typing.Optional[SyncUser]:
-    """Return user information for syncing badges for a specific user.
-
-    Returns None if the user cannot be synced (no 'badge' scope on a token,
-    or no Blender ID user_id known).
-    """
-    my_log = log.getChild('refresh_single_user')
-
-    now = utcnow()
-    tokens_coll = current_app.db('tokens')
-    users_coll = current_app.db('users')
-
-    token_info = tokens_coll.find_one({
-        'user': user_id,
-        'token': {'$exists': True},
-        'oauth_scopes': 'badge',
-        'expire_time': {'$gt': now},
-    })
-    if not token_info:
-        my_log.debug('No token with scope "badge" for user %s', user_id)
-        return None
-
-    user_info = users_coll.find_one({'_id': user_id})
-    # TODO(Sybren): do this filtering in the MongoDB query:
-    bid_user_ids = [auth_info.get('user_id')
-                    for auth_info in user_info.get('auth', [])
-                    if auth_info.get('provider', '') == 'blender-id' and auth_info.get('user_id')]
-    if not bid_user_ids:
-        my_log.debug('No Blender ID user_id for user %s', user_id)
-        return None
-
-    bid_user_id = bid_user_ids[0]
-    return SyncUser(user_id=user_id, token=token_info['token'], bid_user_id=bid_user_id)
-
-
-def find_users_to_sync() -> typing.Iterable[SyncUser]:
-    """Return user information of syncable users with badges."""
-
-    now = utcnow()
-    tokens_coll = current_app.db('tokens')
-    cursor = tokens_coll.aggregate([
-        # Find all users who have a 'badge' scope in their OAuth token.
-        {'$match': {
-            'token': {'$exists': True},
-            'oauth_scopes': 'badge',
-            'expire_time': {'$gt': now},
-            # TODO(Sybren): save real token expiry time but keep checking tokens hourly when they are used!
-        }},
-        {'$lookup': {
-            'from': 'users',
-            'localField': 'user',
-            'foreignField': '_id',
-            'as': 'user'
-        }},
-
-        # Prevent 'user' from being an array.
-        {'$unwind': {'path': '$user'}},
-
-        # Get the Blender ID user ID only.
-        {'$unwind': {'path': '$user.auth'}},
-        {'$match': {'user.auth.provider': 'blender-id'}},
-
-        # Only select those users whose badge doesn't exist or has expired.
-        {'$match': {
-            'user.badges.expires': {'$not': {'$gt': now}}
-        }},
-
-        # Make sure that the badges that expire last are also refreshed last.
-        {'$sort': {'user.badges.expires': 1}},
-
-        # Reduce the document to the info we're after.
-        {'$project': {
-            'token': True,
-            'user._id': True,
-            'user.auth.user_id': True,
-        }},
-    ])
-
-    log.debug('Aggregating tokens and users')
-    for user_info in cursor:
-        log.debug('User %s has badges %s',
-                  user_info['user']['_id'], user_info['user'].get('badges'))
-        yield SyncUser(
-            user_id=user_info['user']['_id'],
-            token=user_info['token'],
-            bid_user_id=user_info['user']['auth']['user_id'])
-
-
-def fetch_badge_html(session: requests.Session, user: SyncUser, size: str) \
-        -> str:
-    """Fetch a Blender ID badge for this user.
-
-    :param session:
-    :param user:
-    :param size: Size indication for the badge images, see the Blender ID
-        documentation/code. As of this writing valid sizes are {'s', 'm', 'l'}.
-    """
-    my_log = log.getChild('fetch_badge_html')
-
-    blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
-    url = urljoin(blender_id_endpoint, f'api/badges/{user.bid_user_id}/html/{size}')
-
-    my_log.debug('Fetching badge HTML at %s for user %s', url, user.user_id)
-    try:
-        resp = session.get(url, headers={'Authorization': f'Bearer {user.token}'})
-    except requests.ConnectionError as ex:
-        my_log.warning('Unable to connect to Blender ID at %s: %s', url, ex)
-        raise StopRefreshing()
-
-    if resp.status_code == 204:
-        my_log.debug('No badges for user %s', user.user_id)
-        return ''
-    if resp.status_code == 403:
-        # TODO(Sybren): this indicates the token is invalid, so we could just as well delete it.
-        my_log.warning('Tried fetching %s for user %s but received a 403: %s',
-                       url, user.user_id, resp.text)
-        return ''
-    if resp.status_code == 400:
-        my_log.warning('Blender ID did not accept our GET request at %s for user %s: %s',
-                       url, user.user_id, resp.text)
-        return ''
-    if resp.status_code == 500:
-        my_log.warning('Blender ID returned an internal server error on %s for user %s, '
-                       'aborting all badge refreshes: %s', url, user.user_id, resp.text)
-        raise StopRefreshing()
-    if resp.status_code == 404:
-        my_log.warning('Blender ID has no user %s for our user %s', user.bid_user_id, user.user_id)
-        return ''
-    resp.raise_for_status()
-    return resp.text
-
-
-def refresh_all_badges(only_user_id: typing.Optional[bson.ObjectId] = None, *,
-                       dry_run=False,
-                       timelimit: datetime.timedelta):
-    """Re-fetch all badges for all users, except when already refreshed recently.
-
-    :param only_user_id: Only refresh this user. This is expected to be used
-        sparingly during manual maintenance / debugging sessions only. It does
-        fetch all users to refresh, and in Python code skips all except the
-        given one.
-    :param dry_run: if True the changes are described in the log, but not performed.
-    :param timelimit: Refreshing will stop after this time. This allows for cron(-like)
-        jobs to run without overlapping, even when the number fo badges to refresh
-        becomes larger than possible within the period of the cron job.
-    """
-    my_log = log.getChild('refresh_all_badges')
-
-    # Test the config before we start looping over the world.
-    badge_expiry = badge_expiry_config()
-    if not badge_expiry or not isinstance(badge_expiry, datetime.timedelta):
-        raise ValueError('BLENDER_ID_BADGE_EXPIRY not configured properly, should be a timedelta')
-
-    session = _get_requests_session()
-    deadline = utcnow() + timelimit
-
-    num_updates = 0
-    for user_info in find_users_to_sync():
-        if utcnow() > deadline:
-            my_log.info('Stopping badge refresh because the timelimit %s (H:MM:SS) was hit.',
-                        timelimit)
-            break
-
-        if only_user_id and user_info.user_id != only_user_id:
-            my_log.debug('Skipping user %s', user_info.user_id)
-            continue
-        try:
-            badge_html = fetch_badge_html(session, user_info, 's')
-        except StopRefreshing:
-            my_log.error('Blender ID has internal problems, stopping badge refreshing at user %s',
-                         user_info)
-            break
-
-        num_updates += 1
-        update_badges(user_info, badge_html, badge_expiry, dry_run=dry_run)
-    my_log.info('Updated badges of %d users%s', num_updates, ' (dry-run)' if dry_run else '')
-
-
-def _get_requests_session() -> requests.Session:
-    from requests.adapters import HTTPAdapter
-    session = requests.Session()
-    session.mount('https://', HTTPAdapter(max_retries=5))
-    return session
-
-
-def refresh_single_user(user_id: bson.ObjectId):
-    """Refresh badges for a single user."""
-    my_log = log.getChild('refresh_single_user')
-
-    badge_expiry = badge_expiry_config()
-    if not badge_expiry:
-        my_log.warning('Skipping badge fetching, BLENDER_ID_BADGE_EXPIRY not configured')
-
-    my_log.debug('Fetching badges for user %s', user_id)
-    session = _get_requests_session()
-    user_info = find_user_to_sync(user_id)
-    if not user_info:
-        return
-
-    try:
-        badge_html = fetch_badge_html(session, user_info, 's')
-    except StopRefreshing:
-        my_log.error('Blender ID has internal problems, stopping badge refreshing at user %s',
-                     user_info)
-        return
-
-    update_badges(user_info, badge_html, badge_expiry, dry_run=False)
-    my_log.info('Updated badges of user %s', user_id)
-
-
-def update_badges(user_info: SyncUser, badge_html: str, badge_expiry: datetime.timedelta,
-                  *, dry_run: bool):
-    my_log = log.getChild('update_badges')
-    users_coll = current_app.db('users')
-
-    update = {'badges': {
-        'html': badge_html,
-        'expires': utcnow() + badge_expiry,
-    }}
-    my_log.info('Updating badges HTML for Blender ID %s, user %s',
-                user_info.bid_user_id, user_info.user_id)
-
-    if dry_run:
-        return
-
-    result = users_coll.update_one({'_id': user_info.user_id},
-                                   {'$set': update})
-    if result.matched_count != 1:
-        my_log.warning('Unable to update badges for user %s', user_info.user_id)
-
-
-def badge_expiry_config() -> datetime.timedelta:
-    return current_app.config.get('BLENDER_ID_BADGE_EXPIRY')
-
-
-@auth.user_logged_in.connect
-def sync_badge_upon_login(sender: auth.UserClass, **kwargs):
-    """Auto-sync badges when a user logs in."""
-
-    log.info('Refreshing badge of %s because they logged in', sender.user_id)
-    refresh_single_user(sender.user_id)
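The deleted module is built to be driven from a periodic job with a time budget, so overlapping runs are avoided even when many badges are stale. A sketch of the call, using the signature shown above; it requires an active application context:

```python
import datetime

from pillar import badge_sync

# Refresh as many stale badges as fit in ten minutes; whatever is left
# over is picked up by the next periodic run.
badge_sync.refresh_all_badges(timelimit=datetime.timedelta(minutes=10))
```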
pillar/celery/algolia_indexing.py  (new file, 38 lines)
@@ -0,0 +1,38 @@
+import logging
+
+from algoliasearch.helpers import AlgoliaException
+
+log = logging.getLogger(__name__)
+
+
+def push_updated_user(user_to_index: dict):
+    """Push an update to the Algolia index when a user item is updated"""
+
+    from pillar.api.utils.algolia import index_user_save
+
+    try:
+        index_user_save(user_to_index)
+    except AlgoliaException as ex:
+        log.warning(
+            'Unable to push user info to Algolia for user "%s", id=%s; %s',  # noqa
+            user_to_index.get('username'),
+            user_to_index.get('objectID'), ex)
+
+
+def index_node_save(node_to_index: dict):
+    from pillar.api.utils import algolia
+
+    try:
+        algolia.index_node_save(node_to_index)
+    except AlgoliaException as ex:
+        log.warning(
+            'Unable to push node info to Algolia for node %s; %s', node_to_index, ex)  # noqa
+
+
+def index_node_delete(delete_id: str):
+
+    from pillar.api.utils import algolia
+    try:
+        algolia.index_node_delete(delete_id)
+    except AlgoliaException as ex:
+        log.warning('Unable to delete node info to Algolia for node %s; %s', delete_id, ex)  # noqa
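These wrappers deliberately swallow `AlgoliaException`, so a search-backend hiccup only logs a warning instead of failing the operation that triggered the index update. A hypothetical call with a minimal user document; field values are illustrative:

```python
from pillar.celery import algolia_indexing

algolia_indexing.push_updated_user({
    'objectID': '5a0c1a2b3c4d5e6f70819202',  # Algolia document key (hypothetical)
    'username': 'example-user',
})
```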
@@ -1,29 +0,0 @@
-"""Avatar synchronisation.
-
-Note that this module can only be imported when an application context is
-active. Best to late-import this in the functions where it's needed.
-"""
-import logging
-
-from bson import ObjectId
-import celery
-
-from pillar import current_app
-from pillar.api.users.avatar import sync_avatar
-
-log = logging.getLogger(__name__)
-
-
-@current_app.celery.task(bind=True, ignore_result=True, acks_late=True)
-def sync_avatar_for_user(self: celery.Task, user_id: str):
-    """Downloads the user's avatar from Blender ID."""
-    # WARNING: when changing the signature of this function, also change the
-    # self.retry() call below.
-
-    uid = ObjectId(user_id)
-
-    try:
-        sync_avatar(uid)
-    except (IOError, OSError):
-        log.exception('Error downloading Blender ID avatar for user %s, will retry later')
-        self.retry((user_id, ), countdown=current_app.config['AVATAR_DOWNLOAD_CELERY_RETRY'])
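As a bound Celery task, the removed `sync_avatar_for_user` is meant to be queued rather than called directly; the string argument keeps task payloads serialisable. A sketch, assuming a configured Celery app:

```python
from pillar.celery.avatar import sync_avatar_for_user

# Queue an avatar sync; the task converts the string back to an ObjectId
# and retries later on download errors.
sync_avatar_for_user.delay('5a0c1a2b3c4d5e6f70819202')  # hypothetical user ID
```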
@@ -1,20 +0,0 @@
-"""Badge HTML synchronisation.
-
-Note that this module can only be imported when an application context is
-active. Best to late-import this in the functions where it's needed.
-"""
-import datetime
-import logging
-
-from pillar import current_app, badge_sync
-
-log = logging.getLogger(__name__)
-
-
-@current_app.celery.task(ignore_result=True)
-def sync_badges_for_users(timelimit_seconds: int):
-    """Synchronises Blender ID badges for the most-urgent users."""
-
-    timelimit = datetime.timedelta(seconds=timelimit_seconds)
-    log.info('Refreshing badges, timelimit is %s (H:MM:SS)', timelimit)
-    badge_sync.refresh_all_badges(timelimit=timelimit)
@@ -1,6 +1,4 @@
 import logging

-import bleach
 from bson import ObjectId

 from pillar import current_app
@@ -12,7 +10,7 @@ from pillar.api.search import algolia_indexing
 log = logging.getLogger(__name__)


-INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri', 'post'}
+INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}


 SEARCH_BACKENDS = {
@@ -30,6 +28,34 @@ def _get_node_from_id(node_id: str):
     return node


+def _handle_picture(node: dict, to_index: dict):
+    """Add picture URL in-place to the to-be-indexed node."""
+
+    picture_id = node.get('picture')
+    if not picture_id:
+        return
+
+    files_collection = current_app.data.driver.db['files']
+    lookup = {'_id': ObjectId(picture_id)}
+    picture = files_collection.find_one(lookup)
+
+    for item in picture.get('variations', []):
+        if item['size'] != 't':
+            continue
+
+        # Not all files have a project...
+        pid = picture.get('project')
+        if pid:
+            link = generate_link(picture['backend'],
+                                 item['file_path'],
+                                 str(pid),
+                                 is_public=True)
+        else:
+            link = item['link']
+        to_index['picture'] = link
+        break
+
+
 def prepare_node_data(node_id: str, node: dict=None) -> dict:
     """Given a node id or a node document, return an indexable version of it.

@@ -60,30 +86,25 @@ def prepare_node_data(node_id: str, node: dict=None) -> dict:
     users_collection = current_app.data.driver.db['users']
     user = users_collection.find_one({'_id': ObjectId(node['user'])})

-    clean_description = bleach.clean(node.get('_description_html') or '', strip=True)
-    if not clean_description and node['node_type'] == 'post':
-        clean_description = bleach.clean(node['properties'].get('_content_html') or '', strip=True)
-
     to_index = {
         'objectID': node['_id'],
         'name': node['name'],
         'project': {
             '_id': project['_id'],
-            'name': project['name'],
-            'url': project['url'],
+            'name': project['name']
         },
         'created': node['_created'],
         'updated': node['_updated'],
         'node_type': node['node_type'],
-        'picture': node.get('picture') or '',
         'user': {
             '_id': user['_id'],
             'full_name': user['full_name']
         },
-        'description': clean_description or None,
-        'is_free': False
+        'description': node.get('description'),
     }

+    _handle_picture(node, to_index)
+
     # If the node has world permissions, compute the Free permission
     if 'world' in node.get('permissions', {}):
         if 'GET' in node['permissions']['world']:
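`_handle_picture()` fills `to_index['picture']` with a resolved thumbnail link, replacing the left-hand side's raw `node.get('picture')` file ID. The resulting document shape, sketched with purely illustrative values:

```python
# Hypothetical output of prepare_node_data() on the right-hand side;
# every value below is illustrative, not taken from a real database.
to_index = {
    'objectID': '5a0c1a2b3c4d5e6f70819202',
    'name': 'Example asset',
    'project': {'_id': '59f0a1b2c3d4e5f601234567', 'name': 'Example project'},
    'created': '2018-01-01T00:00:00+00:00',
    'updated': '2018-01-02T00:00:00+00:00',
    'node_type': 'asset',
    'user': {'_id': '58e9f0a1b2c3d4e5f6012345', 'full_name': 'Example User'},
    'description': 'A short description',
    'picture': 'https://storage.example.com/projects/xyz/thumb-t.jpg',  # from _handle_picture()
}
```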
@@ -13,7 +13,6 @@ from pillar.cli.maintenance import manager_maintenance
 from pillar.cli.operations import manager_operations
 from pillar.cli.setup import manager_setup
 from pillar.cli.elastic import manager_elastic
-from . import badges

 from pillar.cli import translations

@@ -25,4 +24,3 @@ manager.add_command("maintenance", manager_maintenance)
 manager.add_command("setup", manager_setup)
 manager.add_command("operations", manager_operations)
 manager.add_command("elastic", manager_elastic)
-manager.add_command("badges", badges.manager)
@@ -1,39 +0,0 @@
-import datetime
-import logging
-
-from flask_script import Manager
-from pillar import current_app, badge_sync
-from pillar.api.utils import utcnow
-
-log = logging.getLogger(__name__)
-
-manager = Manager(current_app, usage="Badge operations")
-
-
-@manager.option('-u', '--user', dest='email', default='', help='Email address of the user to sync')
-@manager.option('-a', '--all', dest='sync_all', action='store_true', default=False,
-                help='Sync all users')
-@manager.option('--go', action='store_true', default=False,
-                help='Actually perform the sync; otherwise it is a dry-run.')
-def sync(email: str = '', sync_all: bool=False, go: bool=False):
-    if bool(email) == bool(sync_all):
-        raise ValueError('Use either --user or --all.')
-
-    if email:
-        users_coll = current_app.db('users')
-        db_user = users_coll.find_one({'email': email}, projection={'_id': True})
-        if not db_user:
-            raise ValueError(f'No user with email {email!r} found')
-        specific_user = db_user['_id']
-    else:
-        specific_user = None
-
-    if not go:
-        log.info('Performing dry-run, not going to change the user database.')
-    start_time = utcnow()
-    badge_sync.refresh_all_badges(specific_user, dry_run=not go,
-                                  timelimit=datetime.timedelta(hours=1))
-    end_time = utcnow()
-    log.info('%s took %s (H:MM:SS)',
-             'Updating user badges' if go else 'Dry-run',
-             end_time - start_time)
@ -1,9 +1,7 @@
|
|||||||
import collections
|
|
||||||
import copy
|
import copy
|
||||||
import datetime
|
import datetime
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
from pathlib import PurePosixPath, Path
|
from pathlib import PurePosixPath
|
||||||
import re
|
import re
|
||||||
import typing
|
import typing
|
||||||
|
|
||||||
@ -14,7 +12,6 @@ from flask_script import Manager
|
|||||||
import pymongo
|
import pymongo
|
||||||
|
|
||||||
from pillar import current_app
|
from pillar import current_app
|
||||||
import pillar.api.utils
|
|
||||||
|
|
||||||
# Collections to skip when finding file references (during orphan file detection).
|
# Collections to skip when finding file references (during orphan file detection).
|
||||||
# This collection can be added to from PillarExtension.setup_app().
|
# This collection can be added to from PillarExtension.setup_app().
|
||||||
@ -306,7 +303,7 @@ def purge_home_projects(go=False):
|
|||||||
yield pid
|
yield pid
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if users_coll.count_documents({'_id': uid, '_deleted': {'$ne': True}}) == 0:
|
if users_coll.find({'_id': uid, '_deleted': {'$ne': True}}).count() == 0:
|
||||||
log.info('Project %s has non-existing owner %s', pid, uid)
|
log.info('Project %s has non-existing owner %s', pid, uid)
|
||||||
bad += 1
|
bad += 1
|
||||||
yield pid
|
yield pid
|
||||||
@ -562,6 +559,50 @@ def replace_pillar_node_type_schemas(project_url=None, all_projects=False, missi
|
|||||||
projects_changed, projects_seen)
|
projects_changed, projects_seen)
|
||||||
|
|
||||||
|
|
||||||
|
@manager_maintenance.command
|
||||||
|
def remarkdown_comments():
|
||||||
|
"""Retranslates all Markdown to HTML for all comment nodes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pillar.api.nodes import convert_markdown
|
||||||
|
|
||||||
|
nodes_collection = current_app.db()['nodes']
|
||||||
|
comments = nodes_collection.find({'node_type': 'comment'},
|
||||||
|
projection={'properties.content': 1,
|
||||||
|
'node_type': 1})
|
||||||
|
|
||||||
|
updated = identical = skipped = errors = 0
|
||||||
|
for node in comments:
|
||||||
|
convert_markdown(node)
|
||||||
|
node_id = node['_id']
|
||||||
|
|
||||||
|
try:
|
||||||
|
content_html = node['properties']['content_html']
|
||||||
|
except KeyError:
|
||||||
|
log.warning('Node %s has no content_html', node_id)
|
||||||
|
skipped += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
result = nodes_collection.update_one(
|
||||||
|
{'_id': node_id},
|
||||||
|
{'$set': {'properties.content_html': content_html}}
|
||||||
|
)
|
||||||
|
if result.matched_count != 1:
|
||||||
|
log.error('Unable to update node %s', node_id)
|
||||||
|
errors += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
if result.modified_count:
|
||||||
|
updated += 1
|
||||||
|
else:
|
||||||
|
identical += 1
|
||||||
|
|
||||||
|
log.info('updated : %i', updated)
|
||||||
|
log.info('identical: %i', identical)
|
||||||
|
log.info('skipped : %i', skipped)
|
||||||
|
log.info('errors : %i', errors)
|
||||||
|
|
||||||
|
|
||||||
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
|
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
|
||||||
help='Project URL')
|
help='Project URL')
|
||||||
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
|
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
|
||||||
@ -643,7 +684,7 @@ def upgrade_attachment_schema(proj_url=None, all_projects=False, go=False):
|
|||||||
log_proj()
|
log_proj()
|
||||||
log.info('Removed %d empty attachment dicts', res.modified_count)
|
log.info('Removed %d empty attachment dicts', res.modified_count)
|
||||||
else:
|
else:
|
||||||
to_remove = nodes_coll.count_documents({'properties.attachments': {},
|
to_remove = nodes_coll.count({'properties.attachments': {},
|
||||||
'project': project['_id']})
|
'project': project['_id']})
|
||||||
if to_remove:
|
if to_remove:
|
||||||
log_proj()
|
log_proj()
|
||||||
@ -726,9 +767,7 @@ def iter_markdown(proj_node_types: dict, some_node: dict, callback: typing.Calla
|
|||||||
continue
|
continue
|
||||||
to_visit.append((subdoc, definition['schema']))
|
to_visit.append((subdoc, definition['schema']))
|
||||||
continue
|
continue
|
||||||
coerce = definition.get('coerce') # Eve < 0.8
|
if definition.get('coerce') != 'markdown':
|
||||||
validator = definition.get('check_with') or definition.get('validator') # Eve >= 0.8
|
|
||||||
if coerce != 'markdown' and validator != 'markdown':
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
my_log.debug('I have to change %r of %s', key, doc)
|
my_log.debug('I have to change %r of %s', key, doc)
|
||||||
@ -739,6 +778,113 @@ def iter_markdown(proj_node_types: dict, some_node: dict, callback: typing.Calla
|
|||||||
doc[key] = new_value
|
doc[key] = new_value
|
||||||
|
|
||||||
|
|
||||||
|
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
|
||||||
|
help='Project URL')
|
||||||
|
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
|
||||||
|
help='Replace on all projects.')
|
||||||
|
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
|
||||||
|
help='Actually perform the changes (otherwise just show as dry-run).')
|
||||||
|
def upgrade_attachment_usage(proj_url=None, all_projects=False, go=False):
|
||||||
|
"""Replaces '@[slug]' with '{attachment slug}'.
|
||||||
|
|
||||||
|
Also moves links from the attachment dict to the attachment shortcode.
|
||||||
|
"""
|
||||||
|
if bool(proj_url) == all_projects:
|
||||||
|
log.error('Use either --project or --all.')
|
||||||
|
return 1
|
||||||
|
|
||||||
|
import html
|
||||||
|
from pillar.api.projects.utils import node_type_dict
|
||||||
|
from pillar.api.utils import remove_private_keys
|
||||||
|
from pillar.api.utils.authentication import force_cli_user
|
||||||
|
|
||||||
|
force_cli_user()
|
||||||
|
|
||||||
|
nodes_coll = current_app.db('nodes')
|
||||||
|
total_nodes = 0
|
||||||
|
failed_node_ids = set()
|
||||||
|
|
||||||
|
# Use a mixture of the old slug RE that still allowes spaces in the slug
|
||||||
|
# name and the new RE that allows dashes.
|
||||||
|
old_slug_re = re.compile(r'@\[([a-zA-Z0-9_\- ]+)\]')
|
||||||
|
for proj in _db_projects(proj_url, all_projects, go=go):
|
||||||
|
proj_id = proj['_id']
|
||||||
|
proj_url = proj.get('url', '-no-url-')
|
||||||
|
nodes = nodes_coll.find({
|
||||||
|
'_deleted': {'$ne': True},
|
||||||
|
'project': proj_id,
|
||||||
|
'properties.attachments': {'$exists': True},
|
||||||
|
})
|
||||||
|
node_count = nodes.count()
|
||||||
|
if node_count == 0:
|
||||||
|
log.debug('Skipping project %s (%s)', proj_url, proj_id)
|
||||||
|
continue
|
||||||
|
|
||||||
|
proj_node_types = node_type_dict(proj)
|
||||||
|
|
||||||
|
for node in nodes:
|
||||||
|
attachments = node['properties']['attachments']
|
||||||
|
replaced = False
|
||||||
|
|
||||||
|
# Inner functions because of access to the node's attachments.
|
||||||
|
def replace(match):
|
||||||
|
nonlocal replaced
|
||||||
|
slug = match.group(1)
|
||||||
|
log.debug(' - OLD STYLE attachment slug %r', slug)
|
||||||
|
try:
|
||||||
|
att = attachments[slug]
|
||||||
|
except KeyError:
|
||||||
|
log.info("Attachment %r not found for node %s", slug, node['_id'])
|
||||||
|
link = ''
|
||||||
|
else:
|
||||||
|
link = att.get('link', '')
|
||||||
|
if link == 'self':
|
||||||
|
link = " link='self'"
|
||||||
|
elif link == 'custom':
|
||||||
|
url = att.get('link_custom')
|
||||||
|
if url:
|
||||||
|
link = " link='%s'" % html.escape(url)
|
||||||
|
replaced = True
|
||||||
|
return '{attachment %r%s}' % (slug.replace(' ', '-'), link)
|
||||||
|
|
||||||
|
def update_markdown(value: str) -> str:
|
||||||
|
return old_slug_re.sub(replace, value)
|
||||||
|
|
||||||
|
iter_markdown(proj_node_types, node, update_markdown)
|
||||||
|
|
||||||
|
# Remove no longer used properties from attachments
|
||||||
|
new_attachments = {}
|
||||||
|
for slug, attachment in attachments.items():
|
||||||
|
replaced |= 'link' in attachment # link_custom implies link
|
||||||
|
attachment.pop('link', None)
|
||||||
|
attachment.pop('link_custom', None)
|
||||||
|
new_attachments[slug.replace(' ', '-')] = attachment
|
||||||
|
node['properties']['attachments'] = new_attachments
|
||||||
|
|
||||||
|
if replaced:
|
||||||
|
total_nodes += 1
|
||||||
|
else:
|
||||||
|
# Nothing got replaced,
|
||||||
|
continue
|
||||||
|
|
||||||
|
if go:
|
||||||
|
# Use Eve to PUT, so we have schema checking.
|
||||||
|
db_node = remove_private_keys(node)
|
||||||
|
r, _, _, status = current_app.put_internal('nodes', db_node, _id=node['_id'])
|
||||||
|
if status != 200:
|
||||||
|
log.error('Error %i storing altered node %s %s', status, node['_id'], r)
|
||||||
|
failed_node_ids.add(node['_id'])
|
||||||
|
# raise SystemExit('Error storing node; see log.')
|
||||||
|
log.debug('Updated node %s: %s', node['_id'], r)
|
||||||
|
|
||||||
|
log.info('Project %s (%s) has %d nodes with attachments',
|
||||||
|
proj_url, proj_id, node_count)
|
||||||
|
log.info('%s %d nodes', 'Updated' if go else 'Would update', total_nodes)
|
||||||
|
if failed_node_ids:
|
||||||
|
log.warning('Failed to update %d of %d nodes: %s', len(failed_node_ids), total_nodes,
|
||||||
|
', '.join(str(nid) for nid in failed_node_ids))
|
||||||
|
|
||||||
|
|
||||||
def _db_projects(proj_url: str, all_projects: bool, project_id='', *, go: bool) \
|
def _db_projects(proj_url: str, all_projects: bool, project_id='', *, go: bool) \
|
||||||
-> typing.Iterable[dict]:
|
-> typing.Iterable[dict]:
|
||||||
"""Yields a subset of the projects in the database.
|
"""Yields a subset of the projects in the database.
|
||||||
@ -778,12 +924,25 @@ def _db_projects(proj_url: str, all_projects: bool, project_id='', *, go: bool)
|
|||||||
log.info('Command took %s', duration)
|
log.info('Command took %s', duration)
|
||||||
|
|
||||||
|
|
||||||
def find_object_ids(something: typing.Any) -> typing.Iterable[bson.ObjectId]:
|
def _find_orphan_files() -> typing.Set[bson.ObjectId]:
|
||||||
"""Generator, yields all ObjectIDs referenced by the given object.
|
"""Finds all non-referenced files for the given project.
|
||||||
|
|
||||||
Assumes 'something' comes from a MongoDB. This function wasn't made for
|
Returns an iterable of all orphan file IDs.
|
||||||
generic Python objects.
|
|
||||||
"""
|
"""
|
||||||
|
log.debug('Finding orphan files')
|
||||||
|
|
||||||
|
# Get all file IDs that belong to this project.
|
||||||
|
files_coll = current_app.db('files')
|
||||||
|
cursor = files_coll.find({'_deleted': {'$ne': True}}, projection={'_id': 1})
|
||||||
|
file_ids = {doc['_id'] for doc in cursor}
|
||||||
|
if not file_ids:
|
||||||
|
log.debug('No files found')
|
||||||
|
return set()
|
||||||
|
|
||||||
|
total_file_count = len(file_ids)
|
||||||
|
log.debug('Found %d files in total', total_file_count)
|
||||||
|
|
||||||
|
def find_object_ids(something: typing.Any) -> typing.Iterable[bson.ObjectId]:
|
||||||
if isinstance(something, bson.ObjectId):
|
if isinstance(something, bson.ObjectId):
|
||||||
yield something
|
yield something
|
||||||
elif isinstance(something, str) and len(something) == 24:
|
elif isinstance(something, str) and len(something) == 24:
|
||||||
@ -796,34 +955,13 @@ def find_object_ids(something: typing.Any) -> typing.Iterable[bson.ObjectId]:
|
|||||||
for item in something:
|
for item in something:
|
||||||
yield from find_object_ids(item)
|
yield from find_object_ids(item)
|
||||||
elif isinstance(something, dict):
|
elif isinstance(something, dict):
|
||||||
for item in something.keys():
|
|
||||||
yield from find_object_ids(item)
|
|
||||||
for item in something.values():
|
for item in something.values():
|
||||||
yield from find_object_ids(item)
|
yield from find_object_ids(item)
|
||||||
|
|
||||||
|
|
||||||
def _find_orphan_files() -> typing.Set[bson.ObjectId]:
|
|
||||||
"""Finds all non-referenced files.
|
|
||||||
|
|
||||||
Returns an iterable of all orphan file IDs.
|
|
||||||
"""
|
|
||||||
log.debug('Finding orphan files')
|
|
||||||
|
|
||||||
# Get all file IDs and make a set; we'll remove any referenced object ID later.
|
|
||||||
files_coll = current_app.db('files')
|
|
||||||
cursor = files_coll.find({'_deleted': {'$ne': True}}, projection={'_id': 1})
|
|
||||||
file_ids = {doc['_id'] for doc in cursor}
|
|
||||||
if not file_ids:
|
|
||||||
log.debug('No files found')
|
|
||||||
return set()
|
|
||||||
|
|
||||||
total_file_count = len(file_ids)
|
|
||||||
log.debug('Found %d files in total', total_file_count)
|
|
||||||
|
|
||||||
# Find all references by iterating through the project itself and every document that has a
|
# Find all references by iterating through the project itself and every document that has a
|
||||||
# 'project' key set to this ObjectId.
|
# 'project' key set to this ObjectId.
|
||||||
db = current_app.db()
|
db = current_app.db()
|
||||||
for coll_name in sorted(db.list_collection_names()):
|
for coll_name in sorted(db.collection_names(include_system_collections=False)):
|
||||||
if coll_name in ORPHAN_FINDER_SKIP_COLLECTIONS:
|
if coll_name in ORPHAN_FINDER_SKIP_COLLECTIONS:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -849,6 +987,7 @@ def find_orphan_files():
|
|||||||
This is a heavy operation that inspects *everything* in MongoDB. Use with care.
|
This is a heavy operation that inspects *everything* in MongoDB. Use with care.
|
||||||
"""
|
"""
|
||||||
from jinja2.filters import do_filesizeformat
|
from jinja2.filters import do_filesizeformat
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'orphan-files.txt'
|
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'orphan-files.txt'
|
||||||
if output_fpath.exists():
|
if output_fpath.exists():
|
||||||
@ -894,6 +1033,7 @@ def delete_orphan_files():
|
|||||||
Use 'find_orphan_files' first to generate orphan-files.txt.
|
Use 'find_orphan_files' first to generate orphan-files.txt.
|
||||||
"""
|
"""
|
||||||
import pymongo.results
|
import pymongo.results
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'orphan-files.txt'
|
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'orphan-files.txt'
|
||||||
with output_fpath.open('r', encoding='ascii') as infile:
|
with output_fpath.open('r', encoding='ascii') as infile:
|
||||||
@ -924,410 +1064,3 @@ def delete_orphan_files():
|
|||||||
log.warning('Soft-deletion modified %d of %d files', res.modified_count, file_count)
|
log.warning('Soft-deletion modified %d of %d files', res.modified_count, file_count)
|
||||||
|
|
||||||
log.info('%d files have been soft-deleted', res.modified_count)
|
log.info('%d files have been soft-deleted', res.modified_count)
|
||||||
|
|
||||||
|
|
||||||
@manager_maintenance.command
|
|
||||||
def find_video_files_without_duration():
|
|
||||||
"""Finds video files without any duration
|
|
||||||
|
|
||||||
This is a heavy operation. Use with care.
|
|
||||||
"""
|
|
||||||
|
|
||||||
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'video_files_without_duration.txt'
|
|
||||||
if output_fpath.exists():
|
|
||||||
log.error('Output filename %s already exists, remove it first.', output_fpath)
|
|
||||||
return 1
|
|
||||||
|
|
||||||
start_timestamp = datetime.datetime.now()
|
|
||||||
files_coll = current_app.db('files')
|
|
||||||
starts_with_video = re.compile("^video", re.IGNORECASE)
|
|
||||||
aggr = files_coll.aggregate([
|
|
||||||
{'$match': {'content_type': starts_with_video,
|
|
||||||
'_deleted': {'$ne': True}}},
|
|
||||||
{'$unwind': '$variations'},
|
|
||||||
{'$match': {
|
|
||||||
'variations.duration': {'$not': {'$gt': 0}}
|
|
||||||
}},
|
|
||||||
{'$project': {'_id': 1}}
|
|
||||||
])
|
|
||||||
|
|
||||||
file_ids = [str(f['_id']) for f in aggr]
|
|
||||||
nbr_files = len(file_ids)
|
|
||||||
log.info('Total nbr video files without duration: %d', nbr_files)
|
|
||||||
|
|
||||||
end_timestamp = datetime.datetime.now()
|
|
||||||
duration = end_timestamp - start_timestamp
|
|
||||||
log.info('Finding files took %s', duration)
|
|
||||||
|
|
||||||
log.info('Writing Object IDs to %s', output_fpath)
|
|
||||||
with output_fpath.open('w', encoding='ascii') as outfile:
|
|
||||||
outfile.write('\n'.join(sorted(file_ids)))
|
|
||||||
|
|
||||||
|
|
||||||
@manager_maintenance.command
|
|
||||||
def find_video_nodes_without_duration():
|
|
||||||
"""Finds video nodes without any duration
|
|
||||||
|
|
||||||
This is a heavy operation. Use with care.
|
|
||||||
"""
|
|
||||||
|
|
||||||
output_fpath = Path(current_app.config['STORAGE_DIR']) / 'video_nodes_without_duration.txt'
|
|
||||||
if output_fpath.exists():
|
|
||||||
log.error('Output filename %s already exists, remove it first.', output_fpath)
|
|
||||||
return 1
|
|
||||||
|
|
||||||
start_timestamp = datetime.datetime.now()
|
|
||||||
nodes_coll = current_app.db('nodes')
|
|
||||||
|
|
||||||
aggr = nodes_coll.aggregate([
|
|
||||||
{'$match': {'node_type': 'asset',
|
|
||||||
'properties.content_type': 'video',
|
|
||||||
'_deleted': {'$ne': True},
|
|
||||||
'properties.duration_seconds': {'$not': {'$gt': 0}}}},
|
|
||||||
{'$project': {'_id': 1}}
|
|
||||||
])
|
|
||||||
|
|
||||||
file_ids = [str(f['_id']) for f in aggr]
|
|
||||||
nbr_files = len(file_ids)
|
|
||||||
log.info('Total nbr video nodes without duration: %d', nbr_files)
|
|
||||||
|
|
||||||
end_timestamp = datetime.datetime.now()
|
|
||||||
duration = end_timestamp - start_timestamp
|
|
||||||
log.info('Finding nodes took %s', duration)
|
|
||||||
|
|
||||||
log.info('Writing Object IDs to %s', output_fpath)
|
|
||||||
with output_fpath.open('w', encoding='ascii') as outfile:
|
|
||||||
outfile.write('\n'.join(sorted(file_ids)))
|
|
||||||
|
|
||||||
|
|
||||||
@manager_maintenance.option('-n', '--nodes', dest='nodes_to_update', nargs='*',
                            help='List of nodes to update')
@manager_maintenance.option('-a', '--all', dest='all_nodes', action='store_true', default=False,
                            help='Update on all video nodes.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
                            help='Actually perform the changes (otherwise just show as dry-run).')
def reconcile_node_video_duration(nodes_to_update=None, all_nodes=False, go=False):
    """Copy video duration from file.variations.duration to node.properties.duration_seconds.

    This is a heavy operation. Use with care.
    """
    from pillar.api.utils import random_etag, utcnow

    if bool(nodes_to_update) == all_nodes:
        log.error('Use either --nodes or --all.')
        return 1

    start_timestamp = datetime.datetime.now()

    nodes_coll = current_app.db('nodes')
    node_subset = []
    if nodes_to_update:
        node_subset = [{'$match': {'_id': {'$in': [ObjectId(nid) for nid in nodes_to_update]}}}]
    files = nodes_coll.aggregate(
        [
            *node_subset,
            {'$match': {
                'node_type': 'asset',
                'properties.content_type': 'video',
                '_deleted': {'$ne': True}}
            },
            {'$lookup': {
                'from': 'files',
                'localField': 'properties.file',
                'foreignField': '_id',
                'as': '_files',
            }},
            {'$unwind': '$_files'},
            {'$unwind': '$_files.variations'},
            {'$match': {'_files.variations.duration': {'$gt': 0}}},
            {'$addFields': {
                'need_update': {
                    '$ne': ['$_files.variations.duration', '$properties.duration_seconds']}
            }},
            {'$match': {'need_update': True}},
            {'$project': {
                '_id': 1,
                'duration': '$_files.variations.duration',
            }}]
    )

    if not go:
        log.info('Would try to update %d nodes', len(list(files)))
        return 0

    modified_count = 0
    for f in files:
        log.debug('Updating node %s with duration %d', f['_id'], f['duration'])
        new_etag = random_etag()
        now = utcnow()
        resp = nodes_coll.update_one(
            {'_id': f['_id']},
            {'$set': {
                'properties.duration_seconds': f['duration'],
                '_etag': new_etag,
                '_updated': now,
            }}
        )
        if resp.modified_count == 0:
            log.debug('Node %s was already up to date', f['_id'])
        modified_count += resp.modified_count

    log.info('Updated %d nodes', modified_count)
    end_timestamp = datetime.datetime.now()
    duration = end_timestamp - start_timestamp
    log.info('Operation took %s', duration)
    return 0

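The `*node_subset` unpacking is a tidy way to splice an optional pipeline stage in front of the fixed stages: when `--nodes` is not given, the list is empty and nothing is inserted. A minimal sketch of the same idiom, independent of this command (names here are illustrative):

```
def build_pipeline(only_ids=None):
    # Optional $match stage: spliced in only when a subset of IDs is requested.
    subset = [{'$match': {'_id': {'$in': only_ids}}}] if only_ids else []
    return [
        *subset,  # zero or one extra stage
        {'$match': {'status': 'published'}},
        {'$project': {'_id': 1}},
    ]
```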
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
                            help='Actually perform the changes (otherwise just show as dry-run).')
def delete_projectless_files(go=False):
    """Soft-deletes file documents of projects that have been deleted.

    WARNING: this also soft-deletes file documents that do not have a project
    property at all.
    """

    start_timestamp = datetime.datetime.now()

    files_coll = current_app.db('files')
    aggr = files_coll.aggregate([
        {'$match': {'_deleted': {'$ne': True}}},
        {'$lookup': {
            'from': 'projects',
            'localField': 'project',
            'foreignField': '_id',
            'as': '_project'
        }},
        {'$match': {'$or': [
            {'_project': []},
            {'_project._deleted': True},
        ]}},
        {'$project': {'_id': True}},
    ])

    files_to_delete: typing.List[ObjectId] = [doc['_id'] for doc in aggr]
    orphan_count = len(files_to_delete)
    log.info('Total number of files to soft-delete: %d', orphan_count)

    total_count = files_coll.count_documents({'_deleted': {'$ne': True}})
    log.info('Total nr of orphan files: %d', orphan_count)
    log.info('Total nr of files       : %d', total_count)
    log.info('Orphan percentage       : %d%%', 100 * orphan_count / total_count)

    if go:
        log.info('Soft-deleting all %d projectless files', orphan_count)
        now = pillar.api.utils.utcnow()
        etag = pillar.api.utils.random_etag()
        result = files_coll.update_many(
            {'_id': {'$in': files_to_delete}},
            {'$set': {
                '_deleted': True,
                '_updated': now,
                '_etag': etag,
            }},
        )
        log.info('Matched count: %d', result.matched_count)
        log.info('Modified count: %d', result.modified_count)

    end_timestamp = datetime.datetime.now()
    duration = end_timestamp - start_timestamp

    if go:
        verb = 'Soft-deleting'
    else:
        verb = 'Finding'
    log.info('%s orphans took %s', verb, duration)

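Throughout these commands, "deleted" documents are never removed; they are flagged with `_deleted: True`, and every query filters on `{'_deleted': {'$ne': True}}`. That particular filter matters because it also matches documents where the field is absent, which most documents are. A small sketch of the convention (the client and database names are for illustration only):

```
import pymongo

client = pymongo.MongoClient()  # assumed local MongoDB, illustration only
files = client['pillar']['files']

# Matches documents where _deleted is False, None, or missing entirely.
live_files = files.count_documents({'_deleted': {'$ne': True}})

# A naive {'_deleted': False} would miss every document without the field.
```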
@manager_maintenance.command
def find_projects_for_files():
    """For file documents without project, tries to find in which project files are used.

    This is a heavy operation that inspects *everything* in MongoDB. Use with care.
    """

    output_fpath = Path(current_app.config['STORAGE_DIR']) / 'files-without-project.json'
    if output_fpath.exists():
        log.error('Output filename %s already exists, remove it first.', output_fpath)
        return 1

    start_timestamp = datetime.datetime.now()

    log.info('Finding files to fix...')
    files_coll = current_app.db('files')
    query = {'project': {'$exists': False},
             '_deleted': {'$ne': True}}

    files_to_fix = {file_doc['_id']: None for file_doc in files_coll.find(query)}
    if not files_to_fix:
        log.info('No files without projects found, congratulations.')
        return 0

    # Find all references by iterating through every node and project, and
    # hoping that they reference the file.
    projects_coll = current_app.db('projects')
    existing_projects: typing.MutableSet[ObjectId] = set()
    for doc in projects_coll.find():
        project_id = doc['_id']
        existing_projects.add(project_id)

        for obj_id in find_object_ids(doc):
            if obj_id not in files_to_fix:
                continue

            files_to_fix[obj_id] = project_id

    nodes_coll = current_app.db('nodes')
    for doc in nodes_coll.find():
        project_id = doc.get('project')
        if not project_id:
            log.warning('Skipping node %s, as it is not part of any project', doc['_id'])
            continue
        if project_id not in existing_projects:
            log.warning('Skipping node %s, as its project %s does not exist',
                        doc['_id'], project_id)
            continue

        for obj_id in find_object_ids(doc):
            if obj_id not in files_to_fix:
                continue

            files_to_fix[obj_id] = project_id

    orphans = {oid for oid, project_id in files_to_fix.items()
               if project_id is None}
    fixable = {str(oid): str(project_id)
               for oid, project_id in files_to_fix.items()
               if project_id is not None}

    log.info('Total nr of orphan files : %d', len(orphans))
    log.info('Total nr of fixable files: %d', len(fixable))

    projects = set(fixable.values())
    log.info('Fixable project count    : %d', len(projects))
    for project_id in projects:
        project = projects_coll.find_one(ObjectId(project_id))
        log.info('  - %40s /p/%-20s created on %s',
                 project['name'], project['url'], project['_created'])

    end_timestamp = datetime.datetime.now()
    duration = end_timestamp - start_timestamp
    log.info('Finding projects took %s', duration)

    log.info('Writing {file_id: project_id} mapping to %s', output_fpath)
    with output_fpath.open('w', encoding='ascii') as outfile:
        json.dump(fixable, outfile, indent=4, sort_keys=True)

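`find_object_ids` is defined elsewhere in this module and not shown in this listing; from its use here it walks an arbitrary document and yields every ObjectId it contains, however deeply nested. A hypothetical sketch of such a helper, under that assumption:

```
import typing

from bson import ObjectId


def find_object_ids(doc: typing.Any) -> typing.Iterator[ObjectId]:
    """Recursively yield every ObjectId in a (possibly nested) document."""
    if isinstance(doc, ObjectId):
        yield doc
    elif isinstance(doc, dict):
        for value in doc.values():
            yield from find_object_ids(value)
    elif isinstance(doc, (list, tuple)):
        for item in doc:
            yield from find_object_ids(item)
```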
@manager_maintenance.option('filepath', type=Path,
                            help='JSON file produced by find_projects_for_files')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
                            help='Actually perform the changes (otherwise just show as dry-run).')
def fix_projects_for_files(filepath: Path, go=False):
    """Assigns file documents to projects.

    Use `manage.py maintenance find_projects_for_files` to produce the JSON
    file that contains the file ID to project ID mapping.
    """

    log.info('Loading %s', filepath)
    with filepath.open('r', encoding='ascii') as infile:
        mapping: typing.Mapping[str, str] = json.load(infile)

    # Group IDs per project for more efficient querying.
    log.info('Grouping per project')
    project_to_file_ids: typing.Mapping[ObjectId, typing.List[ObjectId]] = \
        collections.defaultdict(list)
    for file_id, project_id in mapping.items():
        project_to_file_ids[ObjectId(project_id)].append(ObjectId(file_id))

    MockUpdateResult = collections.namedtuple('MockUpdateResult', 'matched_count modified_count')

    files_coll = current_app.db('files')
    total_matched = total_modified = 0
    for project_oid, file_oids in project_to_file_ids.items():
        query = {'_id': {'$in': file_oids}}

        if go:
            result = files_coll.update_many(query, {'$set': {'project': project_oid}})
        else:
            found = files_coll.count_documents(query)
            result = MockUpdateResult(found, 0)

        total_matched += result.matched_count
        total_modified += result.modified_count

        if result.matched_count != len(file_oids):
            log.warning('Matched only %d of %d files; modified %d; for project %s',
                        result.matched_count, len(file_oids), result.modified_count, project_oid)
        else:
            log.info('Matched all %d files; modified %d; for project %s',
                     result.matched_count, result.modified_count, project_oid)

    log.info('Done updating %d files (found %d, modified %d) on %d projects',
             len(mapping), total_matched, total_modified, len(project_to_file_ids))

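The JSON file is a flat string-to-string mapping of file ID to project ID, exactly as `find_projects_for_files` dumps it. A minimal sketch of reading one back (the IDs are made up for illustration):

```
import json

from bson import ObjectId

# Example of the on-disk shape written by find_projects_for_files:
# {"5672beecc0261b2005ed1a33": "5672beecc0261b2005ed1a34", ...}
with open('files-without-project.json', 'r', encoding='ascii') as infile:
    mapping = json.load(infile)

file_oids = [ObjectId(fid) for fid in mapping]               # keys: file IDs
project_oids = {ObjectId(pid) for pid in mapping.values()}   # values: project IDs
```

The `MockUpdateResult` namedtuple mirrors just the two attributes of PyMongo's `UpdateResult` that the reporting code reads, which keeps the dry-run and real branches symmetrical.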
@manager_maintenance.option('-u', '--user', dest='user', nargs='?',
                            help='Update subscriptions for single user.')
@manager_maintenance.option('-o', '--object', dest='context_object', nargs='?',
                            help='Update subscriptions for context_object.')
@manager_maintenance.option('-g', '--go', dest='go', action='store_true', default=False,
                            help='Actually perform the changes (otherwise just show as dry-run).')
def fix_missing_activities_subscription_defaults(user=None, context_object=None, go=False):
    """Assign default values to activities-subscriptions documents where values are missing."""

    subscriptions_collection = current_app.db('activities-subscriptions')
    lookup_is_subscribed = {
        'is_subscribed': {'$exists': False},
    }

    lookup_notifications = {
        'notifications.web': {'$exists': False},
    }

    if user:
        lookup_is_subscribed['user'] = ObjectId(user)
        lookup_notifications['user'] = ObjectId(user)

    if context_object:
        lookup_is_subscribed['context_object'] = ObjectId(context_object)
        lookup_notifications['context_object'] = ObjectId(context_object)

    num_need_is_subscribed_update = subscriptions_collection.count_documents(lookup_is_subscribed)
    log.info("Found %d documents that need 'is_subscribed' updated", num_need_is_subscribed_update)
    num_need_notification_web_update = subscriptions_collection.count_documents(lookup_notifications)
    log.info("Found %d documents that need 'notifications.web' updated", num_need_notification_web_update)

    if not go:
        return

    if num_need_is_subscribed_update > 0:
        log.info("Updating 'is_subscribed'")
        resp = subscriptions_collection.update_many(
            lookup_is_subscribed,
            {
                '$set': {'is_subscribed': True}
            },
            upsert=False
        )
        if resp.modified_count != num_need_is_subscribed_update:
            log.warning("Expected %d documents to be updated, was %d",
                        num_need_is_subscribed_update, resp.modified_count)

    if num_need_notification_web_update > 0:
        log.info("Updating 'notifications.web'")
        resp = subscriptions_collection.update_many(
            lookup_notifications,
            {
                '$set': {'notifications.web': True}
            },
            upsert=False
        )
        if resp.modified_count != num_need_notification_web_update:
            log.warning("Expected %d documents to be updated, was %d",
                        num_need_notification_web_update, resp.modified_count)

    log.info("Done updating 'activities-subscriptions' documents")
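Two small bugs were fixed in the warnings above: the first format placeholder was a bare `%` instead of `%d`, and `resp['nModified']` would raise a `TypeError` because PyMongo's `UpdateResult` is not subscriptable; the counts live on attributes, and the raw server reply (which does contain `nModified`) is only reachable via `raw_result`. For reference (the collection here is an illustration):

```
import pymongo

coll = pymongo.MongoClient()['pillar']['activities-subscriptions']  # illustration only
result = coll.update_many({'is_subscribed': {'$exists': False}},
                          {'$set': {'is_subscribed': True}})
print(result.matched_count)   # documents matching the filter
print(result.modified_count)  # documents actually changed
print(result.raw_result)      # raw server reply, e.g. {'n': 5, 'nModified': 5, ...}
```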
@@ -165,6 +165,49 @@ def merge_project(src_proj_url, dest_proj_url):
     log.info('Done moving.')


+@manager_operations.command
+def index_users_rebuild():
+    """Clear users index, update settings and reindex all users."""
+
+    import concurrent.futures
+
+    from pillar.api.utils.algolia import algolia_index_user_save
+
+    users_index = current_app.algolia_index_users
+    if users_index is None:
+        log.error('Algolia is not configured properly, unable to do anything!')
+        return 1
+
+    log.info('Dropping existing index: %s', users_index)
+    users_index.clear_index()
+    index_users_update_settings()
+
+    db = current_app.db()
+    users = db['users'].find({'_deleted': {'$ne': True}})
+    user_count = users.count()
+
+    log.info('Reindexing all %i users', user_count)
+
+    real_current_app = current_app._get_current_object()._get_current_object()
+
+    def do_user(user):
+        with real_current_app.app_context():
+            algolia_index_user_save(user)
+
+    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
+        future_to_user = {executor.submit(do_user, user): user
+                          for user in users}
+        for idx, future in enumerate(concurrent.futures.as_completed(future_to_user)):
+            user = future_to_user[future]
+            user_ident = user.get('email') or user.get('_id')
+            try:
+                future.result()
+            except Exception:
+                log.exception('Error updating user %i/%i %s', idx + 1, user_count, user_ident)
+            else:
+                log.info('Updated user %i/%i %s', idx + 1, user_count, user_ident)
+
+
 @manager_operations.command
 def index_users_update_settings():
     """Configure indexing backend as required by the project"""

@@ -191,7 +234,7 @@ def hash_auth_tokens():
     tokens_coll = current_app.db('tokens')
     query = {'token': {'$exists': True}}
     cursor = tokens_coll.find(query, projection={'token': 1, '_id': 1})
-    log.info('Updating %d tokens', tokens_coll.count_documents(query))
+    log.info('Updating %d tokens', cursor.count())

     for token_doc in cursor:
         hashed_token = hash_auth_token(token_doc['token'])
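The doubled `_get_current_object()` call in `index_users_rebuild` suggests that `current_app` is here a proxy wrapping another proxy; unwrapping it yields the concrete application object, which the worker threads need because a thread inherits neither request nor application context. A minimal sketch of that pattern with a bare Flask app (names are illustrative):

```
import concurrent.futures

from flask import Flask, current_app

app = Flask(__name__)

def work(item):
    # Each worker thread pushes its own app context; current_app is usable inside.
    with app.app_context():
        current_app.logger.info('processing %s', item)

with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
    list(pool.map(work, range(10)))
```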
@@ -1,8 +1,6 @@
-from collections import defaultdict
-import datetime
 import os.path
 from os import getenv
+from collections import defaultdict

 import requests.certs

 # Certificate file for communication with other systems.
@@ -31,11 +29,10 @@ DEBUG = False
 SECRET_KEY = ''

 # Authentication token hashing key. If empty falls back to UTF8-encoded SECRET_KEY with a warning.
-# Not used to hash new tokens, but it is used to check pre-existing hashed tokens.
 AUTH_TOKEN_HMAC_KEY = b''

 # Authentication settings
-BLENDER_ID_ENDPOINT = 'http://id.local:8000/'
+BLENDER_ID_ENDPOINT = 'http://id.local:8000'

 CDN_USE_URL_SIGNING = True
 CDN_SERVICE_DOMAIN_PROTOCOL = 'https'
@@ -195,7 +192,7 @@ BLENDER_CLOUD_ADDON_VERSION = '1.4'
 TLS_CERT_FILE = requests.certs.where()

 CELERY_BACKEND = 'redis://redis/1'
-CELERY_BROKER = 'redis://redis/2'
+CELERY_BROKER = 'amqp://guest:guest@rabbit//'

 # This configures the Celery task scheduler in such a way that we don't
 # have to import the pillar.celery.XXX modules. Remember to run
@@ -206,20 +203,8 @@ CELERY_BEAT_SCHEDULE = {
         'schedule': 600,  # every N seconds
         'args': ('gcs', 100)
     },
-    'refresh-blenderid-badges': {
-        'task': 'pillar.celery.badges.sync_badges_for_users',
-        'schedule': 10 * 60,  # every N seconds
-        'args': (9 * 60, ),  # time limit in seconds, keep shorter than 'schedule'
-    }
 }

-# Badges will be re-fetched every timedelta.
-# TODO(Sybren): A proper value should be determined after we actually have users with badges.
-BLENDER_ID_BADGE_EXPIRY = datetime.timedelta(hours=4)
-
-# How many times the Celery task for downloading an avatar is retried.
-AVATAR_DOWNLOAD_CELERY_RETRY = 3
-
 # Mapping from user role to capabilities obtained by users with that role.
 USER_CAPABILITIES = defaultdict(**{
     'subscriber': {'subscriber', 'home-project'},
@@ -272,14 +257,3 @@ STATIC_FILE_HASH = ''
 # all API endpoints do not need it. On the views that require it, we use the
 # current_app.csrf.protect() method.
 WTF_CSRF_CHECK_DEFAULT = False
-
-# Flask Debug Toolbar. Enable it by overriding DEBUG_TB_ENABLED in config_local.py.
-DEBUG_TB_ENABLED = False
-DEBUG_TB_PANELS = [
-    'flask_debugtoolbar.panels.versions.VersionDebugPanel',
-    'flask_debugtoolbar.panels.headers.HeaderDebugPanel',
-    'flask_debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
-    'flask_debugtoolbar.panels.config_vars.ConfigVarsDebugPanel',
-    'flask_debugtoolbar.panels.template.TemplateDebugPanel',
-    'flask_debugtoolbar.panels.logger.LoggingPanel',
-    'flask_debugtoolbar.panels.route_list.RouteListDebugPanel']
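These module-level values are only defaults; as the `DEBUG_TB_ENABLED` comment above indicates, deployments override them in `config_local.py`. A hypothetical override file, assuming that loading order (all values here are placeholders):

```
# config_local.py -- hypothetical deployment overrides, loaded after config.py
SECRET_KEY = 'generate-a-long-random-string-here'
BLENDER_ID_ENDPOINT = 'https://id.blender.org/'
CELERY_BACKEND = 'redis://my-redis-host/1'
CELERY_BROKER = 'redis://my-redis-host/2'
```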
@@ -4,7 +4,7 @@ This is for user-generated stuff, like comments.
 """

 import bleach
-import commonmark
+import CommonMark

 from . import shortcodes

@@ -44,7 +44,7 @@ ALLOWED_STYLES = [

 def markdown(s: str) -> str:
     commented_shortcodes = shortcodes.comment_shortcodes(s)
-    tainted_html = commonmark.commonmark(commented_shortcodes)
+    tainted_html = CommonMark.commonmark(commented_shortcodes)

     # Create a Cleaner that supports parsing of bare links (see filters).
     cleaner = bleach.Cleaner(tags=ALLOWED_TAGS,
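(The `CommonMark` package was later renamed to lowercase `commonmark`; that rename is all this hunk toggles.) The render-then-sanitize order matters: CommonMark emits whatever HTML the source contains, and bleach strips it back to the allow-list afterwards. A minimal sketch of the same two-step pipeline, with a made-up allow-list rather than Pillar's real one:

```
import bleach
import commonmark

ALLOWED = ['p', 'a', 'strong', 'em', 'code', 'pre']  # illustrative only

def render_markdown(text: str) -> str:
    tainted = commonmark.commonmark(text)         # Markdown -> HTML, unsanitized
    return bleach.clean(tainted, tags=ALLOWED)    # escape anything not allowed

print(render_markdown('**hi** <script>alert(1)</script>'))
# Roughly: <p><strong>hi</strong> &lt;script&gt;alert(1)&lt;/script&gt;</p>
```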
@@ -1,5 +1,3 @@
-import flask
-import raven.breadcrumbs
 from raven.contrib.flask import Sentry

 from .auth import current_user
@@ -16,14 +14,16 @@ class PillarSentry(Sentry):
     def init_app(self, app, *args, **kwargs):
         super().init_app(app, *args, **kwargs)

-        flask.request_started.connect(self.__add_sentry_breadcrumbs, self)
-
-    def __add_sentry_breadcrumbs(self, sender, **extra):
-        raven.breadcrumbs.record(
-            message='Request started',
-            category='http',
-            data={'url': flask.request.url}
-        )
+    # We perform authentication of the user while handling the request,
+    # so Sentry calls get_user_info() too early.
+
+    def get_user_context_again(self, ):
+        from flask import request
+
+        try:
+            self.client.user_context(self.get_user_info(request))
+        except Exception as e:
+            self.client.logger.exception(str(e))

     def get_user_info(self, request):
         user_info = super().get_user_info(request)
@@ -163,11 +163,11 @@ class YouTube:
             return html_module.escape('{youtube invalid YouTube ID/URL}')

         src = f'https://www.youtube.com/embed/{youtube_id}?rel=0'
-        html = f'<div class="embed-responsive embed-responsive-16by9">' \
-               f'<iframe class="shortcode youtube embed-responsive-item"' \
-               f' width="{width}" height="{height}" src="{src}"' \
-               f' frameborder="0" allow="autoplay; encrypted-media" allowfullscreen></iframe>' \
-               f'</div>'
+        iframe_tag = f'<iframe class="shortcode youtube embed-responsive-item" width="{width}"' \
+                     f' height="{height}" src="{src}" frameborder="0" allow="autoplay; encrypted-media"' \
+                     f' allowfullscreen></iframe>'
+        # Embed the iframe in a container, to allow easier styling
+        html = f'<div class="embed-responsive embed-responsive-16by9">{iframe_tag}</div>'
         return html


@@ -228,25 +228,12 @@ class Attachment:

         return self.render(file_doc, pargs, kwargs)

-    def sdk_file(self, slug: str, document: dict) -> pillarsdk.File:
+    def sdk_file(self, slug: str, node_properties: dict) -> pillarsdk.File:
         """Return the file document for the attachment with this slug."""

         from pillar.web import system_util

-        # TODO (fsiddi) Make explicit what 'document' is.
-        # In some cases we pass the entire node or project documents, in other cases
-        # we pass node.properties. This should be unified at the level of do_markdown.
-        # For now we do a quick hack and first look for 'properties' in the doc,
-        # then we look for 'attachments'.
-
-        doc_properties = document.get('properties')
-        if doc_properties:
-            # We passed an entire document (all nodes must have 'properties')
-            attachments = doc_properties.get('attachments', {})
-        else:
-            # The value of document could have been defined as 'node.properties'
-            attachments = document.get('attachments', {})
+        attachments = node_properties.get('attachments', {})

         attachment = attachments.get(slug)
         if not attachment:
             raise self.NoSuchSlug(slug)
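For a concrete sense of what the YouTube shortcode produces, here is the construction in isolation with sample values (the video ID and dimensions are made up; the class names and responsive wrapper are the ones in the hunk above):

```
youtube_id, width, height = 'ABCDEFGHIJK', '560', '315'
src = f'https://www.youtube.com/embed/{youtube_id}?rel=0'
iframe_tag = (f'<iframe class="shortcode youtube embed-responsive-item" width="{width}"'
              f' height="{height}" src="{src}" frameborder="0"'
              f' allow="autoplay; encrypted-media" allowfullscreen></iframe>')
html = f'<div class="embed-responsive embed-responsive-16by9">{iframe_tag}</div>'
```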
@@ -1,7 +1,6 @@
 # -*- encoding: utf-8 -*-

 import base64
-import contextlib
 import copy
 import datetime
 import json
@@ -11,7 +10,11 @@ import pathlib
 import sys
 import typing
 import unittest.mock
-from urllib.parse import urlencode, urljoin
+try:
+    from urllib.parse import urlencode
+except ImportError:
+    from urllib.parse import urlencode

 from bson import ObjectId, tz_util

@@ -24,7 +27,6 @@ from eve.tests import TestMinimal
 import pymongo.collection
 from flask.testing import FlaskClient
 import flask.ctx
-import flask.wrappers
 import responses

 import pillar
@@ -174,10 +176,6 @@ class AbstractPillarTest(TestMinimal):
         for modname in remove:
             del sys.modules[modname]

-    def url_for(self, endpoint, **values):
-        with self.app.app_context():
-            return flask.url_for(endpoint, **values)
-
     def ensure_file_exists(self, file_overrides=None, *, example_file=None) -> (ObjectId, dict):
         if example_file is None:
             example_file = ctd.EXAMPLE_FILE
@@ -187,7 +185,7 @@ class AbstractPillarTest(TestMinimal):
         else:
             self.ensure_project_exists()

-        with self.app.app_context():
+        with self.app.test_request_context():
             files_collection = self.app.data.driver.db['files']
             assert isinstance(files_collection, pymongo.collection.Collection)

@@ -328,48 +326,15 @@ class AbstractPillarTest(TestMinimal):

         return user

-    @contextlib.contextmanager
-    def login_as(self, user_id: typing.Union[str, ObjectId]):
-        """Context manager, within the context the app context is active and the user logged in.
-
-        The logging-in happens when a request starts, so it's only active when
-        e.g. self.get() or self.post() or somesuch request is used.
-        """
-        from pillar.auth import UserClass, login_user_object
-
-        if isinstance(user_id, str):
-            user_oid = ObjectId(user_id)
-        elif isinstance(user_id, ObjectId):
-            user_oid = user_id
-        else:
-            raise TypeError(f'invalid type {type(user_id)} for parameter user_id')
-        user_doc = self.fetch_user_from_db(user_oid)
-
-        def signal_handler(sender, **kwargs):
-            login_user_object(user)
-
-        with self.app.app_context():
-            user = UserClass.construct('', user_doc)
-            with flask.request_started.connected_to(signal_handler, self.app):
-                yield
-
-    # TODO: rename to 'create_auth_token' now that 'expire_in_days' can be negative.
-    def create_valid_auth_token(self,
-                                user_id: typing.Union[str, ObjectId],
-                                token='token',
-                                *,
-                                oauth_scopes: typing.Optional[typing.List[str]]=None,
-                                expire_in_days=1) -> dict:
+    def create_valid_auth_token(self, user_id, token='token'):
         from pillar.api.utils import utcnow

-        if isinstance(user_id, str):
-            user_id = ObjectId(user_id)
-        future = utcnow() + datetime.timedelta(days=expire_in_days)
+        future = utcnow() + datetime.timedelta(days=1)

         with self.app.test_request_context():
             from pillar.api.utils import authentication as auth

-            token_data = auth.store_token(user_id, token, future, oauth_scopes=oauth_scopes)
+            token_data = auth.store_token(user_id, token, future, None)

         return token_data

@@ -399,7 +364,7 @@ class AbstractPillarTest(TestMinimal):

         return user_id

-    def create_node(self, node_doc) -> ObjectId:
+    def create_node(self, node_doc):
         """Creates a node, returning its ObjectId. """

         with self.app.test_request_context():
@@ -441,7 +406,7 @@ class AbstractPillarTest(TestMinimal):
         """Sets up Responses to mock unhappy validation flow."""

         responses.add(responses.POST,
-                      urljoin(self.app.config['BLENDER_ID_ENDPOINT'], 'u/validate_token'),
+                      '%s/u/validate_token' % self.app.config['BLENDER_ID_ENDPOINT'],
                       json={'status': 'fail'},
                       status=403)

@@ -449,7 +414,7 @@ class AbstractPillarTest(TestMinimal):
         """Sets up Responses to mock happy validation flow."""

         responses.add(responses.POST,
-                      urljoin(self.app.config['BLENDER_ID_ENDPOINT'], 'u/validate_token'),
+                      '%s/u/validate_token' % self.app.config['BLENDER_ID_ENDPOINT'],
                       json=BLENDER_ID_USER_RESPONSE,
                       status=200)

@@ -520,10 +485,11 @@ class AbstractPillarTest(TestMinimal):

     def client_request(self, method, path, qs=None, expected_status=200, auth_token=None, json=None,
                        data=None, headers=None, files=None, content_type=None, etag=None,
-                       environ_overrides=None) -> flask.wrappers.Response:
+                       environ_overrides=None):
         """Performs a HTTP request to the server."""

         from pillar.api.utils import dumps
+        import json as mod_json

         headers = headers or {}
         environ_overrides = environ_overrides or {}
@@ -556,21 +522,29 @@ class AbstractPillarTest(TestMinimal):
             expected_status, resp.status_code, resp.data
         ))

+        def get_json():
+            if resp.mimetype != 'application/json':
+                raise TypeError('Unable to load JSON from mimetype %r' % resp.mimetype)
+            return mod_json.loads(resp.data)
+
+        resp.json = get_json
+        resp.get_json = get_json
+
         return resp

-    def get(self, *args, **kwargs) -> flask.wrappers.Response:
+    def get(self, *args, **kwargs):
         return self.client_request('GET', *args, **kwargs)

-    def post(self, *args, **kwargs) -> flask.wrappers.Response:
+    def post(self, *args, **kwargs):
         return self.client_request('POST', *args, **kwargs)

-    def put(self, *args, **kwargs) -> flask.wrappers.Response:
+    def put(self, *args, **kwargs):
         return self.client_request('PUT', *args, **kwargs)

-    def delete(self, *args, **kwargs) -> flask.wrappers.Response:
+    def delete(self, *args, **kwargs):
         return self.client_request('DELETE', *args, **kwargs)

-    def patch(self, *args, **kwargs) -> flask.wrappers.Response:
+    def patch(self, *args, **kwargs):
         return self.client_request('PATCH', *args, **kwargs)

     def assertAllowsAccess(self,
@@ -587,7 +561,7 @@ class AbstractPillarTest(TestMinimal):
             raise TypeError('expected_user_id should be a string or ObjectId, '
                             f'but is {expected_user_id!r}')

-        resp = self.get('/api/users/me', expected_status=200, auth_token=token).get_json()
+        resp = self.get('/api/users/me', expected_status=200, auth_token=token).json()

         if expected_user_id:
             self.assertEqual(resp['_id'], str(expected_user_id))
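A sketch of how these test helpers combine in practice (the method names are the ones defined above; the user-creation helper is assumed from the surrounding class, and the token value is arbitrary):

```
class ExampleTest(AbstractPillarTest):
    def test_own_user_endpoint(self):
        user_id = self.create_user()  # helper assumed from the surrounding class
        self.create_valid_auth_token(user_id, 'token')

        # client_request() asserts the status code and attaches json()/get_json().
        resp = self.get('/api/users/me', expected_status=200, auth_token='token')
        self.assertEqual(resp.json()['_id'], str(user_id))
```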
@@ -1,9 +1,9 @@
 """Flask configuration file for unit testing."""

-BLENDER_ID_ENDPOINT = 'http://id.local:8001/'  # Non existant server
+BLENDER_ID_ENDPOINT = 'http://id.local:8001'  # Non existant server

-SERVER_NAME = 'localhost.local'
-PILLAR_SERVER_ENDPOINT = 'http://localhost.local/api/'
+SERVER_NAME = 'localhost'
+PILLAR_SERVER_ENDPOINT = 'http://localhost/api/'

 MAIN_PROJECT_ID = '5672beecc0261b2005ed1a33'

@@ -44,5 +44,3 @@ ELASTIC_INDICES = {

 # MUST be 8 characters long, see pillar.flask_extra.HashedPathConverter
 STATIC_FILE_HASH = 'abcd1234'
-
-CACHE_NO_NULL_WARNING = True
@@ -1,7 +1,6 @@
 from pillar.api.eve_settings import *

 MONGO_DBNAME = 'pillar_test'
-MONGO_USERNAME = None


 def override_eve():
@@ -11,7 +10,5 @@ def override_eve():
     test_settings.MONGO_HOST = MONGO_HOST
     test_settings.MONGO_PORT = MONGO_PORT
     test_settings.MONGO_DBNAME = MONGO_DBNAME
-    test_settings.MONGO1_USERNAME = MONGO_USERNAME
     tests.MONGO_HOST = MONGO_HOST
     tests.MONGO_DBNAME = MONGO_DBNAME
-    tests.MONGO_USERNAME = MONGO_USERNAME
@@ -10,12 +10,9 @@ import flask_login
 import jinja2.filters
 import jinja2.utils
 import werkzeug.exceptions as wz_exceptions
-from werkzeug.local import LocalProxy

 import pillarsdk

 import pillar.api.utils
-from pillar.api.utils import pretty_duration
 from pillar.web.utils import pretty_date
 from pillar.web.nodes.routes import url_for_node
 import pillar.markdown
@@ -31,14 +28,6 @@ def format_pretty_date_time(d):
     return pretty_date(d, detail=True)


-def format_pretty_duration(s):
-    return pretty_duration(s)
-
-
-def format_pretty_duration_fractional(s):
-    return pillar.api.utils.pretty_duration_fractional(s)
-
-
 def format_undertitle(s):
     """Underscore-replacing title filter.

@@ -211,23 +200,9 @@ def do_yesno(value, arg=None):
     return no


-def do_json(some_object: typing.Any) -> str:
-    import pillar.auth
-
-    if isinstance(some_object, LocalProxy):
-        return do_json(some_object._get_current_object())
-    if isinstance(some_object, pillarsdk.Resource):
-        some_object = some_object.to_dict()
-    if isinstance(some_object, pillar.auth.UserClass):
-        some_object = some_object.frontend_info()
-    return pillar.api.utils.dumps(some_object)
-
-
 def setup_jinja_env(jinja_env, app_config: dict):
     jinja_env.filters['pretty_date'] = format_pretty_date
     jinja_env.filters['pretty_date_time'] = format_pretty_date_time
-    jinja_env.filters['pretty_duration'] = format_pretty_duration
-    jinja_env.filters['pretty_duration_fractional'] = format_pretty_duration_fractional
     jinja_env.filters['undertitle'] = format_undertitle
     jinja_env.filters['hide_none'] = do_hide_none
     jinja_env.filters['pluralize'] = do_pluralize
@@ -237,7 +212,6 @@ def setup_jinja_env(jinja_env, app_config: dict):
     jinja_env.filters['yesno'] = do_yesno
     jinja_env.filters['repr'] = repr
     jinja_env.filters['urljoin'] = functools.partial(urllib.parse.urljoin, allow_fragments=True)
-    jinja_env.filters['json'] = do_json
     jinja_env.globals['url_for_node'] = do_url_for_node
     jinja_env.globals['abs_url'] = functools.partial(flask.url_for,
                                                      _external=True,
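The registration pattern above is plain Jinja2: any callable assigned into `jinja_env.filters` becomes usable as `{{ value|name }}` in templates. A minimal standalone sketch (the filter body is a toy stand-in, not Pillar's real implementation):

```
import jinja2

env = jinja2.Environment()

def undertitle(s: str) -> str:
    # Toy stand-in for the real filter: '_' -> ' ', then title-case.
    return s.replace('_', ' ').title()

env.filters['undertitle'] = undertitle
print(env.from_string('{{ "node_type"|undertitle }}').render())  # -> 'Node Type'
```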
@@ -1,6 +1,5 @@
 import logging
 import urllib.parse
-import warnings

 from pillarsdk import Node
 from flask import Blueprint
@@ -8,6 +7,7 @@ from flask import current_app
 from flask import render_template
 from flask import redirect
 from flask import request
+from werkzeug.contrib.atom import AtomFeed

 from pillar.flask_extra import ensure_schema
 from pillar.web.utils import system_util
@@ -91,11 +91,6 @@ def error_403():
 @blueprint.route('/feeds/blogs.atom')
 def feeds_blogs():
     """Global feed generator for latest blogposts across all projects"""

-    # Werkzeug deprecated their Atom feed. Tracked in https://developer.blender.org/T65274.
-    with warnings.catch_warnings():
-        from werkzeug.contrib.atom import AtomFeed
-
     @current_app.cache.cached(60*5)
     def render_page():
         feed = AtomFeed('Blender Cloud - Latest updates',
@@ -19,19 +19,10 @@ def attachment_form_group_create(schema_prop):


 def _attachment_build_single_field(schema_prop):
-    # 'keyschema' was renamed to 'keysrules' in Cerberus 1.3, but our data may still have the old
-    # names. Same for 'valueschema' and 'valuesrules'.
-    keysrules = schema_prop.get('keysrules') or schema_prop.get('keyschema')
-    if keysrules is None:
-        raise KeyError(f"missing 'keysrules' key in schema {schema_prop}")
-    valuesrules = schema_prop.get('valuesrules') or schema_prop.get('valueschema')
-    if valuesrules is None:
-        raise KeyError(f"missing 'valuesrules' key in schema {schema_prop}")
-
     # Ugly hard-coded schema.
     fake_schema = {
-        'slug': keysrules,
-        'oid': valuesrules['schema']['oid'],
+        'slug': schema_prop['propertyschema'],
+        'oid': schema_prop['valueschema']['schema']['oid'],
     }
     file_select_form_group = build_file_select_form(fake_schema)
     return file_select_form_group
pillar/web/nodes/custom/comments.py (new file, 236 lines)
@@ -0,0 +1,236 @@
import logging

from flask import current_app
from flask import request
from flask import jsonify
from flask import render_template
from flask_login import login_required, current_user
from pillarsdk import Node
from pillarsdk import Project
import werkzeug.exceptions as wz_exceptions

from pillar.api.utils import utcnow
from pillar.web import subquery
from pillar.web.nodes.routes import blueprint
from pillar.web.utils import gravatar
from pillar.web.utils import pretty_date
from pillar.web.utils import system_util

log = logging.getLogger(__name__)


@blueprint.route('/comments/create', methods=['POST'])
@login_required
def comments_create():
    content = request.form['content']
    parent_id = request.form.get('parent_id')

    if not parent_id:
        log.warning('User %s tried to create comment without parent_id', current_user.objectid)
        raise wz_exceptions.UnprocessableEntity()

    api = system_util.pillar_api()
    parent_node = Node.find(parent_id, api=api)
    if not parent_node:
        log.warning('Unable to create comment for user %s, parent node %r not found',
                    current_user.objectid, parent_id)
        raise wz_exceptions.UnprocessableEntity()

    log.info('Creating comment for user %s on parent node %r',
             current_user.objectid, parent_id)

    comment_props = dict(
        project=parent_node.project,
        name='Comment',
        user=current_user.objectid,
        node_type='comment',
        properties=dict(
            content=content,
            status='published',
            confidence=0,
            rating_positive=0,
            rating_negative=0))

    if parent_id:
        comment_props['parent'] = parent_id

        # Get the parent node and check if it's a comment. In which case we flag
        # the current comment as a reply.
        parent_node = Node.find(parent_id, api=api)
        if parent_node.node_type == 'comment':
            comment_props['properties']['is_reply'] = True

    comment = Node(comment_props)
    comment.create(api=api)

    return jsonify({'node_id': comment._id}), 201


@blueprint.route('/comments/<string(length=24):comment_id>', methods=['POST'])
@login_required
def comment_edit(comment_id):
    """Allows a user to edit their comment."""
    from pillar.web import jinja

    api = system_util.pillar_api()

    comment = Node({'_id': comment_id})
    result = comment.patch({'op': 'edit', 'content': request.form['content']}, api=api)
    assert result['_status'] == 'OK'

    return jsonify({
        'status': 'success',
        'data': {
            'content': result.properties.content or '',
            'content_html': jinja.do_markdowned(result.properties, 'content'),
        }})


def format_comment(comment, is_reply=False, is_team=False, replies=None):
    """Format a comment node into a simpler dictionary.

    :param comment: the comment object
    :param is_reply: True if the comment is a reply to another comment
    :param is_team: True if the author belongs to the group that owns the node
    :param replies: list of replies (formatted with this function)
    """
    try:
        is_own = (current_user.objectid == comment.user._id) \
            if current_user.is_authenticated else False
    except AttributeError:
        current_app.bugsnag.notify(Exception(
            'Missing user for embedded user ObjectId'),
            meta_data={'nodes_info': {'node_id': comment['_id']}})
        return
    is_rated = False
    is_rated_positive = None
    if comment.properties.ratings:
        for rating in comment.properties.ratings:
            if current_user.is_authenticated and rating.user == current_user.objectid:
                is_rated = True
                is_rated_positive = rating.is_positive
                break

    return dict(_id=comment._id,
                gravatar=gravatar(comment.user.email, size=32),
                time_published=pretty_date(comment._created or utcnow(), detail=True),
                rating=comment.properties.rating_positive - comment.properties.rating_negative,
                author=comment.user.full_name,
                author_username=comment.user.username,
                content=comment.properties.content,
                is_reply=is_reply,
                is_own=is_own,
                is_rated=is_rated,
                is_rated_positive=is_rated_positive,
                is_team=is_team,
                replies=replies)


@blueprint.route('/<string(length=24):node_id>/comments')
def comments_for_node(node_id):
    """Shows the comments attached to the given node.

    The URL can be overridden in order to define can_post_comments in a different way.
    """

    api = system_util.pillar_api()

    node = Node.find(node_id, api=api)
    project = Project({'_id': node.project})
    can_post_comments = project.node_type_has_method('comment', 'POST', api=api)
    can_comment_override = request.args.get('can_comment', 'True') == 'True'
    can_post_comments = can_post_comments and can_comment_override

    return render_comments_for_node(node_id, can_post_comments=can_post_comments)


def render_comments_for_node(node_id: str, *, can_post_comments: bool):
    """Render the list of comments for a node."""
    api = system_util.pillar_api()

    # Query for all children, i.e. comments on the node.
    comments = Node.all({
        'where': {'node_type': 'comment', 'parent': node_id},
    }, api=api)

    def enrich(some_comment):
        some_comment['_user'] = subquery.get_user_info(some_comment['user'])
        some_comment['_is_own'] = some_comment['user'] == current_user.objectid
        some_comment['_current_user_rating'] = None  # tri-state boolean
        some_comment['_rating'] = (some_comment.properties.rating_positive
                                   - some_comment.properties.rating_negative)

        if current_user.is_authenticated:
            for rating in some_comment.properties.ratings or ():
                if rating.user != current_user.objectid:
                    continue

                some_comment['_current_user_rating'] = rating.is_positive

    for comment in comments['_items']:
        # Query for all grandchildren, i.e. replies to comments on the node.
        comment['_replies'] = Node.all({
            'where': {'node_type': 'comment', 'parent': comment['_id']},
        }, api=api)

        enrich(comment)
        for reply in comment['_replies']['_items']:
            enrich(reply)
    nr_of_comments = sum(1 + comment['_replies']['_meta']['total']
                         for comment in comments['_items'])
    return render_template('nodes/custom/comment/list_embed.html',
                           node_id=node_id,
                           comments=comments,
                           nr_of_comments=nr_of_comments,
                           show_comments=True,
                           can_post_comments=can_post_comments)


@blueprint.route('/<string(length=24):node_id>/commentform')
def commentform_for_node(node_id):
    """Shows only the comment form for comments attached to the given node.

    i.e. does not show the comments themselves, just the form to post a new comment.
    """

    api = system_util.pillar_api()

    node = Node.find(node_id, api=api)
    project = Project({'_id': node.project})
    can_post_comments = project.node_type_has_method('comment', 'POST', api=api)

    return render_template('nodes/custom/comment/list_embed.html',
                           node_id=node_id,
                           show_comments=False,
                           can_post_comments=can_post_comments)


@blueprint.route("/comments/<comment_id>/rate/<operation>", methods=['POST'])
@login_required
def comments_rate(comment_id, operation):
    """Comment rating function.

    :param comment_id: the comment id
    :type comment_id: str
    :param operation: the rating operation, one of 'revoke', 'upvote' or 'downvote'
    :type operation: str
    """

    if operation not in {'revoke', 'upvote', 'downvote'}:
        raise wz_exceptions.BadRequest('Invalid operation')

    api = system_util.pillar_api()

    # PATCH the node and return the result.
    comment = Node({'_id': comment_id})
    result = comment.patch({'op': operation}, api=api)
    assert result['_status'] == 'OK'

    return jsonify({
        'status': 'success',
        'data': {
            'op': operation,
            'rating_positive': result.properties.rating_positive,
            'rating_negative': result.properties.rating_negative,
        }})
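A sketch of how a frontend might drive these endpoints over HTTP (the paths and form-field names are the ones routed above; the host, blueprint mount point, and logged-in session cookie are assumptions):

```
import requests

BASE = 'http://localhost:5000/nodes'  # assumed mount point of this blueprint
session = requests.Session()          # assumed to carry a logged-in session cookie

# Create a comment under a node (or under another comment, making it a reply).
resp = session.post(f'{BASE}/comments/create',
                    data={'content': 'Nice asset!',
                          'parent_id': '5672beecc0261b2005ed1a33'})
comment_id = resp.json()['node_id']

# Upvote it, then revoke the vote.
session.post(f'{BASE}/comments/{comment_id}/rate/upvote')
session.post(f'{BASE}/comments/{comment_id}/rate/revoke')
```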
@@ -19,7 +19,6 @@ from pillar.web.nodes.routes import url_for_node
 from pillar.web.nodes.forms import get_node_form
 import pillar.web.nodes.attachments
 from pillar.web.projects.routes import project_update_nodes_list
-from pillar.web.projects.routes import project_navigation_links

 log = logging.getLogger(__name__)

@@ -62,10 +61,16 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
         post.picture = get_file(post.picture, api=api)
         post.url = url_for_node(node=post)

+    # Use the *_main_project.html template for the main blog
+    is_main_project = project_id == current_app.config['MAIN_PROJECT_ID']
+    main_project_template = '_main_project' if is_main_project else ''
+    main_project_template = '_main_project'
     index_arch = 'archive' if archive else 'index'
-    template_path = f'nodes/custom/blog/{index_arch}.html',
+    template_path = f'nodes/custom/blog/{index_arch}{main_project_template}.html',

     if url:
+        template_path = f'nodes/custom/post/view{main_project_template}.html',
+
         post = Node.find_one({
             'where': {'parent': blog._id, 'properties.url': url},
             'embedded': {'node_type': 1, 'user': 1},
@@ -90,7 +95,6 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
     can_create_blog_posts = project.node_type_has_method('post', 'POST', api=api)

     # Use functools.partial so we can later pass page=X.
-    is_main_project = project_id == current_app.config['MAIN_PROJECT_ID']
     if is_main_project:
         url_func = functools.partial(url_for, 'main.main_blog_archive')
     else:
@@ -108,21 +112,24 @@ def posts_view(project_id=None, project_url=None, url=None, *, archive=False, pa
     else:
         project.blog_archive_prev = None

-    navigation_links = project_navigation_links(project, api)
-    extension_sidebar_links = current_app.extension_sidebar_links(project)
+    title = 'blog_main' if is_main_project else 'blog'

+    pages = Node.all({
+        'where': {'project': project._id, 'node_type': 'page'},
+        'projection': {'name': 1}}, api=api)
+
     return render_template(
         template_path,
         blog=blog,
-        node=post,  # node is used by the generic comments rendering (see custom/_scripts.pug)
+        node=post,
         posts=posts._items,
         posts_meta=pmeta,
         more_posts_available=pmeta['total'] > pmeta['max_results'],
         project=project,
+        title=title,
         node_type_post=project.get_node_type('post'),
         can_create_blog_posts=can_create_blog_posts,
-        navigation_links=navigation_links,
-        extension_sidebar_links=extension_sidebar_links,
+        pages=pages._items,
         api=api)
@@ -48,10 +48,7 @@ def find_for_comment(project, node):
             continue

         try:
-            parent = Node.find_one({'where': {
-                '_id': parent.parent,
-                '_deleted': {'$ne': True}
-            }}, api=api)
+            parent = Node.find(parent.parent, api=api)
         except ResourceNotFound:
             log.warning(
                 'url_for_node(node_id=%r): Unable to find parent node %r',
@@ -97,16 +94,6 @@ def find_for_post(project, node):
                     url=node.properties.url)


-@register_node_finder('page')
-def find_for_page(project, node):
-    """Returns the URL for a page."""
-
-    project_id = project['_id']
-
-    the_project = project_url(project_id, project=project)
-    return url_for('projects.view_node', project_url=the_project.url, node_id=node.properties.url)
-
-
 def find_for_other(project, node):
     """Fallback: Assets, textures, and other node types.
@ -1,10 +1,9 @@
|
|||||||
import functools
|
|
||||||
import logging
|
import logging
|
||||||
import typing
|
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from datetime import date
|
from datetime import date
|
||||||
import pillarsdk
|
import pillarsdk
|
||||||
|
from flask import current_app
|
||||||
from flask_wtf import FlaskForm
|
from flask_wtf import FlaskForm
|
||||||
from wtforms import StringField
|
from wtforms import StringField
|
||||||
from wtforms import DateField
|
from wtforms import DateField
|
||||||
@ -18,8 +17,6 @@ from wtforms import DateTimeField
|
|||||||
from wtforms import SelectMultipleField
|
from wtforms import SelectMultipleField
|
||||||
from wtforms import FieldList
|
from wtforms import FieldList
|
||||||
from wtforms.validators import DataRequired
|
from wtforms.validators import DataRequired
|
||||||
|
|
||||||
from pillar import current_app
|
|
||||||
from pillar.web.utils import system_util
|
from pillar.web.utils import system_util
|
||||||
from pillar.web.utils.forms import FileSelectField
|
from pillar.web.utils.forms import FileSelectField
|
||||||
from pillar.web.utils.forms import CustomFormField
|
from pillar.web.utils.forms import CustomFormField
|
||||||
@ -47,13 +44,6 @@ def iter_node_properties(node_type):
|
|||||||
yield prop_name, prop_schema, prop_fschema
|
yield prop_name, prop_schema, prop_fschema
|
||||||
|
|
||||||
|
|
||||||
@functools.lru_cache(maxsize=1)
|
|
||||||
def tag_choices() -> typing.List[typing.Tuple[str, str]]:
|
|
||||||
"""Return (value, label) tuples for the NODE_TAGS config setting."""
|
|
||||||
tags = current_app.config.get('NODE_TAGS') or []
|
|
||||||
return [(tag, tag.title()) for tag in tags] # (value, label) tuples
|
|
||||||
|
|
||||||
|
|
||||||
def add_form_properties(form_class, node_type):
|
def add_form_properties(form_class, node_type):
|
||||||
"""Add fields to a form based on the node and form schema provided.
|
"""Add fields to a form based on the node and form schema provided.
|
||||||
:type node_schema: dict
|
:type node_schema: dict
|
||||||
@ -70,9 +60,7 @@ def add_form_properties(form_class, node_type):
|
|||||||
# Recursive call if detects a dict
|
# Recursive call if detects a dict
|
||||||
field_type = schema_prop['type']
|
field_type = schema_prop['type']
|
||||||
|
|
||||||
if prop_name == 'tags' and field_type == 'list':
|
if field_type == 'dict':
|
||||||
field = SelectMultipleField(choices=tag_choices())
|
|
||||||
elif field_type == 'dict':
|
|
||||||
assert prop_name == 'attachments'
|
assert prop_name == 'attachments'
|
||||||
field = attachments.attachment_form_group_create(schema_prop)
|
field = attachments.attachment_form_group_create(schema_prop)
|
||||||
elif field_type == 'list':
|
elif field_type == 'list':
|
||||||
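The removed `tag_choices()` helper shows a handy pattern: `functools.lru_cache(maxsize=1)` on a zero-argument function caches the computed choices once per process. A self-contained sketch (the config dict is a stand-in for `current_app.config`):

```
import functools

CONFIG = {'NODE_TAGS': ['animation', 'lighting']}  # stand-in for current_app.config

@functools.lru_cache(maxsize=1)
def tag_choices():
    # One cache slot suffices: a zero-argument function has a single call signature.
    # Note the cached list is shared between callers, so treat it as read-only.
    tags = CONFIG.get('NODE_TAGS') or []
    return [(tag, tag.title()) for tag in tags]

print(tag_choices())  # [('animation', 'Animation'), ('lighting', 'Lighting')]
```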
@@ -1,9 +1,9 @@
 import os
+import json
 import logging
 from datetime import datetime

 import pillarsdk
-from pillar import shortcodes
 from pillarsdk import Node
 from pillarsdk import Project
 from pillarsdk.exceptions import ResourceNotFound
@@ -17,12 +17,15 @@ from flask import request
 from flask import jsonify
 from flask import abort
 from flask_login import current_user
+from flask_wtf.csrf import validate_csrf

 import werkzeug.exceptions as wz_exceptions
 from wtforms import SelectMultipleField
 from flask_login import login_required
 from jinja2.exceptions import TemplateNotFound

+from pillar.api.utils.authorization import check_permissions
+from pillar.web.utils import caching
 from pillar.markdown import markdown
 from pillar.web.nodes.forms import get_node_form
 from pillar.web.nodes.forms import process_node_form
@@ -105,11 +108,6 @@ def view(node_id, extra_template_args: dict=None):

     node_type_name = node.node_type

-    if node_type_name == 'page':
-        # HACK: The 'edit node' page GETs this endpoint, but for pages it's plain wrong,
-        # so we just redirect to the correct URL.
-        return redirect(url_for_node(node=node))
-
     if node_type_name == 'post' and not request.args.get('embed'):
         # Posts shouldn't be shown at this route (unless viewed embedded, tipically
         # after an edit. Redirect to the correct one.
@@ -489,14 +487,11 @@ def preview_markdown():
     current_app.csrf.protect()

     try:
-        content = request.json['content']
+        content = request.form['content']
     except KeyError:
         return jsonify({'_status': 'ERR',
                         'message': 'The field "content" was not specified.'}), 400
-    html = markdown(content)
-    attachmentsdict = request.json.get('attachments', {})
-    html = shortcodes.render_commented(html, context={'attachments': attachmentsdict})
-    return jsonify(content=html)
+    return jsonify(content=markdown(content))


 def ensure_lists_exist_as_empty(node_doc, node_type):
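Note that the `preview_markdown` change also changes the request contract: the master side reads a JSON body (`request.json`, with an optional `attachments` mapping rendered through shortcodes), while the wip side reads form data (`request.form`). A hypothetical client call against the master-side behaviour; the route path and token value are made up, and the CSRF header name assumes Flask-WTF's default header handling:

```
import requests

resp = requests.post(
    'https://example.org/nodes/preview-markdown',    # hypothetical URL for the endpoint
    json={'content': '# Title', 'attachments': {}},  # JSON body, as request.json expects
    headers={'X-CSRFToken': 'token-goes-here'})      # CSRF token via header (assumption)
print(resp.json()['content'])                        # the rendered HTML
```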
@@ -609,94 +604,5 @@ def url_for_node(node_id=None, node=None):
     return finders.find_url_for_node(node)


-@blueprint.route("/<node_id>/breadcrumbs")
-def breadcrumbs(node_id: str):
-    """Return breadcrumbs for the given node, as JSON.
-
-    Note that a missing parent is still returned in the breadcrumbs,
-    but with `{_exists: false, name: '-unknown-'}`.
-
-    The breadcrumbs start with the top-level parent, and end with the node
-    itself (marked by {_self: true}). Returns JSON like this:
-
-    {breadcrumbs: [
-        ...,
-        {_id: "parentID",
-         name: "The Parent Node",
-         node_type: "group",
-         url: "/p/project/parentID"},
-        {_id: "deadbeefbeefbeefbeeffeee",
-         _self: true,
-         name: "The Node Itself",
-         node_type: "asset",
-         url: "/p/project/nodeID"},
-    ]}
-
-    When a parent node is missing, it has a breadcrumb like this:
-
-    {_id: "deadbeefbeefbeefbeeffeee",
-     _exists': false,
-     name': '-unknown-'}
-    """
-
-    api = system_util.pillar_api()
-    is_self = True
-
-    def make_crumb(some_node: None) -> dict:
-        """Construct a breadcrumb for this node."""
-        nonlocal is_self
-
-        crumb = {
-            '_id': some_node._id,
-            'name': some_node.name,
-            'node_type': some_node.node_type,
-            'url': finders.find_url_for_node(some_node),
-        }
-        if is_self:
-            crumb['_self'] = True
-            is_self = False
-        return crumb
-
-    def make_missing_crumb(some_node_id: None) -> dict:
-        """Construct 'missing parent' breadcrumb."""
-
-        return {
-            '_id': some_node_id,
-            '_exists': False,
-            'name': '-unknown-',
-        }
-
-    # The first node MUST exist.
-    try:
-        node = Node.find(node_id, api=api)
-    except ResourceNotFound:
-        log.warning('breadcrumbs(node_id=%r): Unable to find node', node_id)
-        raise wz_exceptions.NotFound(f'Unable to find node {node_id}')
-    except ForbiddenAccess:
-        log.warning('breadcrumbs(node_id=%r): access denied to current user', node_id)
-        raise wz_exceptions.Forbidden(f'No access to node {node_id}')
-
-    crumbs = []
-    while True:
-        crumbs.append(make_crumb(node))
-
-        child_id = node._id
-        node_id = node.parent
-        if not node_id:
-            break
-
-        # If a subsequent node doesn't exist any more, include that in the breadcrumbs.
-        # Forbidden nodes are handled as if they don't exist.
-        try:
-            node = Node.find(node_id, api=api)
-        except (ResourceNotFound, ForbiddenAccess):
-            log.warning('breadcrumbs: Unable to find node %r but it is marked as parent of %r',
-                        node_id, child_id)
-            crumbs.append(make_missing_crumb(node_id))
-            break
-
-    return jsonify({'breadcrumbs': list(reversed(crumbs))})
-
-
 # Import of custom modules (using the same nodes decorator)
-from .custom import groups, storage, posts
+from .custom import comments, groups, storage, posts
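The removed `breadcrumbs` endpoint is essentially a bottom-up walk of the parent chain followed by a reversal. A compact sketch with plain dicts standing in for nodes (no API calls, no permission checks):

```
nodes = {
    'a': {'_id': 'a', 'name': 'Project Root', 'parent': None},
    'b': {'_id': 'b', 'name': 'Chapter', 'parent': 'a'},
    'c': {'_id': 'c', 'name': 'Shot', 'parent': 'b'},
}

def breadcrumbs(node_id):
    crumbs = []
    while node_id:
        node = nodes.get(node_id)
        if node is None:
            # A missing parent still gets a crumb, as the endpoint's docstring describes.
            crumbs.append({'_id': node_id, '_exists': False, 'name': '-unknown-'})
            break
        crumb = {'_id': node['_id'], 'name': node['name']}
        if not crumbs:
            crumb['_self'] = True  # the first crumb built is the node itself
        crumbs.append(crumb)
        node_id = node['parent']
    return list(reversed(crumbs))

print(breadcrumbs('c'))  # top-level parent first, the node itself last
```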
@@ -6,8 +6,7 @@ from flask_login import current_user

 import pillar.flask_extra
 from pillar import current_app
-import pillar.api.users.avatar
-from pillar.api.utils import authorization, str2id, jsonify
+from pillar.api.utils import authorization, str2id, gravatar, jsonify
 from pillar.web.system_util import pillar_api

 from pillarsdk import Organization, User
@@ -48,7 +47,7 @@ def view_embed(organization_id: str):

     members = om.org_members(organization.members)
     for member in members:
-        member['avatar'] = pillar.api.users.avatar.url(member)
+        member['avatar'] = gravatar(member.get('email'))
         member['_id'] = str(member['_id'])

     admin_user = User.find(organization.admin_uid, api=api)
@@ -30,7 +30,6 @@ class ProjectForm(FlaskForm):
                                  ('deleted', 'Deleted')])
    picture_header = FileSelectField('Picture header', file_format='image')
    picture_square = FileSelectField('Picture square', file_format='image')
-   picture_16_9 = FileSelectField('Picture 16:9', file_format='image')

    def validate(self):
        rv = FlaskForm.validate(self)
@@ -22,10 +22,8 @@ import werkzeug.exceptions as wz_exceptions

 from pillar import current_app
 from pillar.api.utils import utcnow
-import pillar.api.users.avatar
 from pillar.web import system_util
 from pillar.web import utils
-from pillar.web.nodes import finders
 from pillar.web.utils.jstree import jstree_get_children
 import pillar.extension

@@ -110,6 +108,7 @@ def index():

     return render_template(
         'projects/index_dashboard.html',
+        gravatar=utils.gravatar(current_user.email, size=128),
         projects_user=projects_user['_items'],
         projects_deleted=projects_deleted['_items'],
         projects_shared=projects_shared['_items'],
@@ -303,53 +302,9 @@ def view(project_url):
                                   'header_video_node': header_video_node})


-def project_navigation_links(project: typing.Type[Project], api) -> list:
-    """Returns a list of nodes for the project, for top navigation display.
-
-    Args:
-        project: A Project object.
-        api: the api client credential.
-
-    Returns:
-        A list of links for the Project.
-        For example we display a link to the project blog if present, as well
-        as pages. The list is structured as follows:
-
-        [{'url': '/p/spring/about', 'label': 'About'},
-         {'url': '/p/spring/blog', 'label': 'Blog'}]
-    """
-
-    links = []
-
-    # Fetch the blog
-    blog = Node.find_first({
-        'where': {'project': project._id, 'node_type': 'blog', '_deleted': {'$ne': True}},
-        'projection': {
-            'name': 1,
-        }
-    }, api=api)
-
-    if blog:
-        links.append({'url': finders.find_url_for_node(blog), 'label': blog.name, 'slug': 'blog'})
-
-    # Fetch pages
-    pages = Node.all({
-        'where': {'project': project._id, 'node_type': 'page', '_deleted': {'$ne': True}},
-        'projection': {
-            'name': 1,
-            'properties.url': 1
-        }
-    }, api=api)
-
-    # Process the results and append the links to the list
-    for p in pages._items:
-        links.append({'url': finders.find_url_for_node(p), 'label': p.name, 'slug': p.properties.url})
-
-    return links
-
-
 def render_project(project, api, extra_context=None, template_name=None):
-    utils.attach_project_pictures(project, api)
+    project.picture_square = utils.get_file(project.picture_square, api=api)
+    project.picture_header = utils.get_file(project.picture_header, api=api)

     def load_latest(list_of_ids, node_type=None):
         """Loads a list of IDs in reversed order."""
@@ -360,7 +315,6 @@ def render_project(project, api, extra_context=None, template_name=None):
         # Construct query parameters outside the loop.
         projection = {'name': 1, 'user': 1, 'node_type': 1, 'project': 1,
                       'properties.url': 1, 'properties.content_type': 1,
-                      'properties.duration_seconds': 1,
                       'picture': 1}
         params = {'projection': projection, 'embedded': {'user': 1}}

@@ -402,6 +356,7 @@ def render_project(project, api, extra_context=None, template_name=None):
         template_name = template_name or 'projects/home_index.html'
         return render_template(
             template_name,
+            gravatar=utils.gravatar(current_user.email, size=128),
             project=project,
             api=system_util.pillar_api(),
             **extra_context)
@@ -413,7 +368,6 @@ def render_project(project, api, extra_context=None, template_name=None):
         embed_string = ''
     template_name = "projects/view{0}.html".format(embed_string)

-    navigation_links = project_navigation_links(project, api)
     extension_sidebar_links = current_app.extension_sidebar_links(project)

     return render_template(template_name,
@@ -422,9 +376,8 @@ def render_project(project, api, extra_context=None, template_name=None):
                            node=None,
                            show_node=False,
                            show_project=True,
-                           og_picture=project.picture_16_9,
+                           og_picture=project.picture_header,
                            activity_stream=activity_stream,
-                           navigation_links=navigation_links,
                            extension_sidebar_links=extension_sidebar_links,
                            **extra_context)

@@ -463,7 +416,6 @@ def view_node(project_url, node_id):
     api = system_util.pillar_api()
     # First we check if it's a simple string, in which case we are looking for
     # a static page. Maybe we could use bson.objectid.ObjectId.is_valid(node_id)
-    project: typing.Optional[Project] = None
     if not utils.is_valid_id(node_id):
         # raise wz_exceptions.NotFound('No such node')
         project, node = render_node_page(project_url, node_id, api)
@@ -481,33 +433,34 @@ def view_node(project_url, node_id):
         project = Project.find_one({'where': {"url": project_url, '_id': node.project}},
                                    api=api)
     except ResourceNotFound:
+        # In theatre mode, we don't need access to the project at all.
         if theatre_mode:
-            pass  # In theatre mode, we don't need access to the project at all.
+            project = None
         else:
             raise wz_exceptions.NotFound('No such project')

-    navigation_links = []
-    extension_sidebar_links = ''
     og_picture = node.picture = utils.get_file(node.picture, api=api)
     if project:
-        utils.attach_project_pictures(project, api)
         if not node.picture:
-            og_picture = project.picture_16_9
-        navigation_links = project_navigation_links(project, api)
-        extension_sidebar_links = current_app.extension_sidebar_links(project)
+            og_picture = utils.get_file(project.picture_header, api=api)
+        project.picture_square = utils.get_file(project.picture_square, api=api)

     # Append _theatre to load the proper template
     theatre = '_theatre' if theatre_mode else ''

     if node.node_type == 'page':
+        pages = Node.all({
+            'where': {'project': project._id, 'node_type': 'page'},
+            'projection': {'name': 1}}, api=api)
         return render_template('nodes/custom/page/view_embed.html',
                                api=api,
                                node=node,
                                project=project,
-                               navigation_links=navigation_links,
-                               extension_sidebar_links=extension_sidebar_links,
+                               pages=pages._items,
                                og_picture=og_picture,)

+    extension_sidebar_links = current_app.extension_sidebar_links(project)
+
     return render_template('projects/view{}.html'.format(theatre),
                            api=api,
                            project=project,
@@ -515,8 +468,7 @@ def view_node(project_url, node_id):
                            show_node=True,
                            show_project=False,
                            og_picture=og_picture,
-                           navigation_links=navigation_links,
-                           extension_sidebar_links=extension_sidebar_links,)
+                           extension_sidebar_links=extension_sidebar_links)


 def find_project_or_404(project_url, embedded=None, api=None):
@@ -539,7 +491,8 @@ def search(project_url):
     """Search into a project"""
     api = system_util.pillar_api()
     project = find_project_or_404(project_url, api=api)
-    utils.attach_project_pictures(project, api)
+    project.picture_square = utils.get_file(project.picture_square, api=api)
+    project.picture_header = utils.get_file(project.picture_header, api=api)

     return render_template('nodes/search.html',
                            project=project,
@@ -580,8 +533,6 @@ def edit(project_url):
             project.picture_square = form.picture_square.data
         if form.picture_header.data:
             project.picture_header = form.picture_header.data
-        if form.picture_16_9.data:
-            project.picture_16_9 = form.picture_16_9.data

         # Update world permissions from is_private checkbox
         if form.is_private.data:
@@ -597,8 +548,6 @@ def edit(project_url):
             form.picture_square.data = project.picture_square._id
         if project.picture_header:
             form.picture_header.data = project.picture_header._id
-        if project.picture_16_9:
-            form.picture_16_9.data = project.picture_16_9._id

         # List of fields from the form that should be hidden to regular users
         if current_user.has_role('admin'):
@@ -707,12 +656,15 @@ def sharing(project_url):
     api = system_util.pillar_api()
     # Fetch the project or 404
     try:
-        project = Project.find_one({'where': {'url': project_url}}, api=api)
+        project = Project.find_one({
+            'where': '{"url" : "%s"}' % (project_url)}, api=api)
     except ResourceNotFound:
         return abort(404)

     # Fetch users that are part of the admin group
     users = project.get_users(api=api)
+    for user in users['_items']:
+        user['avatar'] = utils.gravatar(user['email'])

     if request.method == 'POST':
         user_id = request.form['user_id']
@@ -722,14 +674,13 @@ def sharing(project_url):
                 user = project.add_user(user_id, api=api)
             elif action == 'remove':
                 user = project.remove_user(user_id, api=api)
-            else:
-                raise wz_exceptions.BadRequest(f'invalid action {action}')
         except ResourceNotFound:
             log.info('/p/%s/edit/sharing: User %s not found', project_url, user_id)
             return jsonify({'_status': 'ERROR',
                             'message': 'User %s not found' % user_id}), 404

-        user['avatar'] = pillar.api.users.avatar.url(user)
+        # Add gravatar to user
+        user['avatar'] = utils.gravatar(user['email'])
         return jsonify(user)

     utils.attach_project_pictures(project, api)
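The `sharing()` change above swaps a proper dict in the `where` clause for a hand-built JSON string. Both produce the same query for well-behaved URLs, but the string version breaks (and invites injection) as soon as the project URL contains a quote; letting the SDK serialize a dict avoids that. A quick check:

```
import json

project_url = 'my-project'

where_str = '{"url" : "%s"}' % project_url   # wip side: hand-built JSON string
where_dict = {'url': project_url}            # master side: plain dict, serialized by the SDK

assert json.loads(where_str) == where_dict   # equal here, but only for quote-free URLs
```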
@@ -1,18 +1,13 @@
 import json
 import logging
-import urllib.parse

 from flask import Blueprint, flash, render_template
-from flask_login import login_required
+from flask_login import login_required, current_user
 from werkzeug.exceptions import abort

-from pillar import current_app
-from pillar.api.utils import jsonify
-import pillar.api.users.avatar
-from pillar.auth import current_user
 from pillar.web import system_util
 from pillar.web.users import forms
-from pillarsdk import File, User, exceptions as sdk_exceptions
+from pillarsdk import User, exceptions as sdk_exceptions

 log = logging.getLogger(__name__)
 blueprint = Blueprint('settings', __name__)
@@ -32,20 +27,14 @@ def profile():

     if form.validate_on_submit():
         try:
-            response = user.set_username(form.username.data, api=api)
-            log.info('updated username of %s: %s', current_user, response)
+            user.username = form.username.data
+            user.update(api=api)
             flash("Profile updated", 'success')
-        except sdk_exceptions.ResourceInvalid as ex:
-            log.warning('unable to set username %s to %r: %s', current_user, form.username.data, ex)
-            message = json.loads(ex.content)
+        except sdk_exceptions.ResourceInvalid as e:
+            message = json.loads(e.content)
             flash(message)

-    blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
-    blender_profile_url = urllib.parse.urljoin(blender_id_endpoint, 'settings/profile')
-
-    return render_template('users/settings/profile.html',
-                           form=form, title='profile',
-                           blender_profile_url=blender_profile_url)
+    return render_template('users/settings/profile.html', form=form, title='profile')


 @blueprint.route('/roles')
@@ -53,19 +42,3 @@ def profile():
 def roles():
     """Show roles and capabilties of the current user."""
     return render_template('users/settings/roles.html', title='roles')
-
-
-@blueprint.route('/profile/sync-avatar', methods=['POST'])
-@login_required
-def sync_avatar():
-    """Fetch the user's avatar from Blender ID and save to storage.
-
-    This is an API-like endpoint, in the sense that it returns JSON.
-    It's here in this file to have it close to the endpoint that
-    serves the only page that calls on this endpoint.
-    """
-
-    new_url = pillar.api.users.avatar.sync_avatar(current_user.user_id)
-    if not new_url:
-        return jsonify({'_message': 'Your avatar could not be updated'})
-    return new_url
pillar/web/static/assets/css/vendor/bootstrap.min.css (vendored, new file, 14 lines)
File diff suppressed because one or more lines are too long
@@ -872,6 +872,12 @@
       "code": 61930,
       "src": "fontawesome"
     },
+    {
+      "uid": "31972e4e9d080eaa796290349ae6c1fd",
+      "css": "users",
+      "code": 59502,
+      "src": "fontawesome"
+    },
     {
       "uid": "c8585e1e5b0467f28b70bce765d5840c",
       "css": "clipboard-copy",
@@ -984,30 +990,6 @@
       "code": 59394,
       "src": "entypo"
     },
-    {
-      "uid": "347c38a8b96a509270fdcabc951e7571",
-      "css": "database",
-      "code": 61888,
-      "src": "fontawesome"
-    },
-    {
-      "uid": "3a6f0140c3a390bdb203f56d1bfdefcb",
-      "css": "speed",
-      "code": 59471,
-      "src": "entypo"
-    },
-    {
-      "uid": "4c1ef492f1d2c39a2250ae457cee2a6e",
-      "css": "social-instagram",
-      "code": 61805,
-      "src": "fontawesome"
-    },
-    {
-      "uid": "e36d581e4f2844db345bddc205d15dda",
-      "css": "users",
-      "code": 59507,
-      "src": "elusive"
-    },
     {
       "uid": "053a214a098a9453877363eeb45f004e",
       "css": "log-in",
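The `code` values in this icon-font config (it has the shape of a Fontello `config.json`) are Unicode code points in the Private Use Area; the CSS refers to them as glyphs. A quick way to inspect them:

```
for code in (61930, 59502, 61888):
    # Each entry maps a CSS class name to a Private Use Area code point.
    print(code, hex(code), chr(code))
```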
Binary files changed (3 files, contents not shown; one removed image was 496 B).
pillar/web/static/assets/js/vendor/jquery.montage.min.js (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
pillar/web/static/assets/js/vendor/videojs-6.2.8.min.js (vendored, new file, 8 lines)
File diff suppressed because one or more lines are too long
@@ -33,8 +33,7 @@ def get_user_info(user_id):
     # TODO: put those fields into a config var or module-level global.
     return {'email': user.email,
             'full_name': user.full_name,
-            'username': user.username,
-            'badges_html': (user.badges and user.badges.html) or ''}
+            'username': user.username}


 def setup_app(app):
@@ -31,10 +31,8 @@ def check_oauth_provider(provider):

 @blueprint.route('/authorize/<provider>')
 def oauth_authorize(provider):
-    if current_user.is_authenticated:
-        next_after_login = session.pop('next_after_login', None) or url_for('main.homepage')
-        log.debug('Redirecting user to %s', next_after_login)
-        return redirect(next_after_login)
+    if not current_user.is_anonymous:
+        return redirect(url_for('main.homepage'))

     try:
         oauth = OAuthSignIn.get_provider(provider)
@@ -50,14 +48,8 @@ def oauth_authorize(provider):

 @blueprint.route('/oauth/<provider>/authorized')
 def oauth_callback(provider):
-    import datetime
-    from pillar.api.utils.authentication import store_token
-    from pillar.api.utils import utcnow
-
-    next_after_login = session.pop('next_after_login', None) or url_for('main.homepage')
     if current_user.is_authenticated:
-        log.debug('Redirecting user to %s', next_after_login)
-        return redirect(next_after_login)
+        return redirect(url_for('main.homepage'))

     oauth = OAuthSignIn.get_provider(provider)
     try:
@@ -67,22 +59,12 @@ def oauth_callback(provider):
         raise wz_exceptions.Forbidden()
     if oauth_user.id is None:
         log.debug('Authentication failed for user with {}'.format(provider))
-        return redirect(next_after_login)
+        return redirect(url_for('main.homepage'))

     # Find or create user
     user_info = {'id': oauth_user.id, 'email': oauth_user.email, 'full_name': ''}
     db_user = find_user_in_db(user_info, provider=provider)
     db_id, status = upsert_user(db_user)

-    # TODO(Sybren): If the user doesn't have any badges, but the access token
-    # does have 'badge' scope, we should fetch the badges in the background.
-
-    if oauth_user.access_token:
-        # TODO(Sybren): make nr of days configurable, or get from OAuthSignIn subclass.
-        token_expiry = utcnow() + datetime.timedelta(days=15)
-        token = store_token(db_id, oauth_user.access_token, token_expiry,
-                            oauth_scopes=oauth_user.scopes)
-    else:
-        token = generate_and_store_token(db_id)
+    token = generate_and_store_token(db_id)

     # Login user
@@ -92,8 +74,11 @@ def oauth_callback(provider):
     # Check with Blender ID to update certain user roles.
     update_subscription()

-    log.debug('Redirecting user to %s', next_after_login)
-    return redirect(next_after_login)
+    next_after_login = session.pop('next_after_login', None)
+    if next_after_login:
+        log.debug('Redirecting user to %s', next_after_login)
+        return redirect(next_after_login)
+    return redirect(url_for('main.homepage'))


 @blueprint.route('/login')
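The `next_after_login` handling above is the usual post-login redirect pattern: stash the intended target in the session before redirecting to the provider, then pop it exactly once after authentication. A minimal sketch with a dict standing in for the Flask session:

```
session = {'next_after_login': '/p/spring'}

def after_login(session, homepage='/'):
    # Pop, so a stale target cannot redirect a later login.
    return session.pop('next_after_login', None) or homepage

print(after_login(session))  # /p/spring
print(after_login(session))  # / (the stored target is used only once)
```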
@@ -43,38 +43,8 @@ def attach_project_pictures(project, api):

     This function should be moved in the API, attached to a new Project object.
     """

-    # When adding to the list of pictures dealt with here, make sure
-    # you update unattach_project_pictures() too.
     project.picture_square = get_file(project.picture_square, api=api)
     project.picture_header = get_file(project.picture_header, api=api)
-    project.picture_16_9 = get_file(project.picture_16_9, api=api)
-
-
-def unattach_project_pictures(project: dict):
-    """Reverts the operation of 'attach_project_pictures'.
-
-    This makes it possible to PUT the project again.
-    """
-
-    def unattach(property_name: str):
-        picture_info = project.get(property_name, None)
-        if not picture_info:
-            project.pop(property_name, None)
-            return
-
-        if not isinstance(picture_info, dict):
-            # Assume it's already is an ID.
-            return
-
-        try:
-            picture_id = picture_info['_id']
-            project[property_name] = picture_id
-        except KeyError:
-            return
-
-    unattach('picture_square')
-    unattach('picture_header')
-    unattach('picture_16_9')


 def mass_attach_project_pictures(projects: typing.Iterable[pillarsdk.Project], *,
@@ -136,16 +106,9 @@ def mass_attach_project_pictures(projects: typing.Iterable[pillarsdk.Project], *


 def gravatar(email: str, size=64):
-    """Deprecated: return the Gravatar URL.
-
-    .. deprecated::
-        Use of Gravatar is deprecated, in favour of our self-hosted avatars.
-        See pillar.api.users.avatar.url(user).
-    """
     import warnings
-    warnings.warn('pillar.web.utils.gravatar() is deprecated, '
-                  'use pillar.api.users.avatar.url() instead',
-                  category=DeprecationWarning, stacklevel=2)
+    warnings.warn("the pillar.web.gravatar function is deprecated; use hashlib instead",
+                  DeprecationWarning, 2)

     from pillar.api.utils import gravatar as api_gravatar
     return api_gravatar(email, size)
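For reference, the Gravatar scheme that the deprecated helper ultimately builds on is an MD5 hash of the trimmed, lowercased e-mail address. A standalone sketch (the `d=mm` fallback parameter is just one choice among several; the project's own helper lives in `pillar.api.utils.gravatar`):

```
import hashlib
from urllib.parse import urlencode

def gravatar_url(email: str, size: int = 64) -> str:
    # The canonical Gravatar scheme: MD5 of the normalized address.
    digest = hashlib.md5(email.strip().lower().encode('utf-8')).hexdigest()
    return 'https://www.gravatar.com/avatar/%s?%s' % (
        digest, urlencode({'s': size, 'd': 'mm'}))

print(gravatar_url('user@example.com', size=128))
```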
@@ -62,7 +62,7 @@ def jstree_get_children(node_id, project_id=None):
         'where': {
             '$and': [
                 {'node_type': {'$regex': '^(?!attract_)'}},
-                {'node_type': {'$not': {'$in': ['comment', 'post', 'blog', 'page']}}},
+                {'node_type': {'$not': {'$in': ['comment', 'post']}}},
             ],
         }
     }
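The master-side filter combines a negative-lookahead regex with a `$not`/`$in` list to hide more node types from the tree. The same logic in plain Python, as a sanity check of what the Mongo query matches:

```
import re

node_types = ['asset', 'attract_task', 'comment', 'post', 'blog', 'page', 'group']

# Mirrors the Mongo $and: exclude attract_* types and the explicit list.
visible = [t for t in node_types
           if re.match(r'^(?!attract_)', t)
           and t not in {'comment', 'post', 'blog', 'page'}]
print(visible)  # ['asset', 'group']
```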
@@ -1,64 +0,0 @@
-[tool.poetry]
-name = "pillar"
-version = "2.0"
-description = ""
-authors = [
-    "Francesco Siddi <francesco@blender.org>",
-    "Pablo Vazquez <pablo@blender.studio>",
-    "Sybren Stüvel <sybren@blender.studio>",
-]
-
-
-[tool.poetry.scripts]
-# Must be run after installing/updating:
-translations = 'pillar.cli.translations:main'
-
-
-[tool.poetry.dependencies]
-python = "~3.6"
-attrs = "~19"
-algoliasearch = "~1"
-bcrypt = "~3"
-blinker = "~1.4"
-bleach = "~3.1"
-celery = {version = "~4.3",extras = ["redis"]}
-cryptography = "2.7"
-commonmark = "~0.9"
-
-# These must match the version of ElasticSearch used:
-elasticsearch = "~6.1"
-elasticsearch-dsl = "~6.1"
-
-Eve = "~0.9"
-Flask = "~1.0"
-Flask-Babel = "~0.12"
-Flask-Caching = "~1.7"
-Flask-DebugToolbar = "~0.10"
-Flask-Script = "~2.0"
-Flask-Login = "~0.4"
-Flask-WTF = "~0.14"
-gcloud = "~0.18"
-google-apitools = "~0.5"
-IPy = "~1.00"
-MarkupSafe = "~1.1"
-ndg-httpsclient = "~0.5"
-Pillow = "~6.0"
-python-dateutil = "~2.8"
-rauth = "~0.7"
-raven = {version = "~6.10",extras = ["flask"]}
-redis = "~3.2"
-shortcodes = "~2.5"
-zencoder = "~0.6"
-pillarsdk = {path = "../pillar-python-sdk"}
-
-# Secondary requirements that weren't installed automatically:
-idna = "~2.8"
-
-
-[tool.poetry.dev-dependencies]
-pillar-devdeps = {path = "./devdeps"}
-
-
-[build-system]
-requires = ["poetry==1.0","cryptography==2.7","setuptools==51.0.0","wheel==0.35.1"]
-build-backend = "poetry.masonry.api"
requirements-dev.txt (new file)
@@ -0,0 +1,17 @@
+-r requirements.txt
+-r ../pillar-python-sdk/requirements-dev.txt
+-e ../pillar  # also works from parent project, like blender-cloud
+
+# Development requirements
+pytest==3.0.6
+responses==0.5.1
+pytest-cov==2.4.0
+mock==2.0.0
+mypy==0.501
+
+# Secondary development requirements
+cookies==2.2.1
+coverage==4.3.4
+pbr==2.0.0
+py==1.4.32
+typed-ast==1.0.2
requirements.txt (new file)
@@ -0,0 +1,63 @@
+# Primary requirements
+-r ../pillar-python-sdk/requirements.txt
+
+attrs==16.2.0
+algoliasearch==1.12.0
+bcrypt==3.1.3
+blinker==1.4
+bleach==2.1.3
+celery[redis]==4.0.2
+CommonMark==0.7.2
+elasticsearch==6.1.1
+elasticsearch-dsl==6.1.0
+Eve==0.7.3
+Flask==0.12
+Flask-Babel==0.11.2
+Flask-Cache==0.13.1
+Flask-Script==2.0.6
+Flask-Login==0.3.2
+Flask-WTF==0.14.2
+gcloud==0.12.0
+google-apitools==0.4.11
+httplib2==0.9.2
+IPy==0.83
+MarkupSafe==0.23
+ndg-httpsclient==0.4.0
+Pillow==4.1.1
+python-dateutil==2.5.3
+rauth==0.7.3
+raven[flask]==6.3.0
+redis==2.10.5
+shortcodes==2.5.0
+WebOb==1.5.0
+wheel==0.29.0
+zencoder==0.6.5
+
+# Secondary requirements
+amqp==2.1.4
+billiard==3.5.0.2
+Flask-PyMongo==0.4.1
+-e git+https://github.com/armadillica/cerberus.git@sybren-0.9#egg=Cerberus
+Events==0.2.2
+future==0.15.2
+html5lib==0.99999999
+googleapis-common-protos==1.1.0
+itsdangerous==0.24
+Jinja2==2.9.6
+kombu==4.0.2
+oauth2client==2.0.2
+oauthlib==2.0.1
+olefile==0.44
+protobuf==3.0.0b2.post2
+protorpc==0.11.1
+pyasn1-modules==0.0.8
+pymongo==3.4.0
+pytz==2017.2
+requests-oauthlib==0.7.0
+rsa==3.4.2
+simplejson==3.10.0
+six==1.10.0
+urllib3==1.22
+vine==1.1.3
+WTForms==2.1
+Werkzeug==0.11.15
Some files were not shown because too many files have changed in this diff.