Lazy Home: Lazy load the latest blog posts and assets, grouped by week
and project.

The JavaScript bundles tutti.js and timeline.js are required; the
following initializes the timeline:

$('.timeline')
    .timeline({
        url: '/api/timeline'
    });
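
A project-specific timeline can be initialized the same way. The
snippet below is a sketch: projectId is a hypothetical variable holding
a 24-character project ObjectId, and queryParams maps to the optional
from/weeksToLoad/dir query arguments accepted by the API endpoints:

    $('.timeline')
        .timeline({
            url: '/api/timeline/p/' + projectId,
            queryParams: {'weeksToLoad': 3, 'dir': 'desc'}
        });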

# JavaScript Notes:
## ES6 transpiling:
* Files in src/scripts/js/es6/common are transpiled from modern ES6
  to ES5 and then bundled into tutti.js (see the sketch below).
* Files in src/scripts/js/es6/individual are transpiled from modern
  ES6 to ES5 into individual module files.
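
Each bundled entry file is also exposed on the global pillar
namespace, named after the file (the gulpfile builds each bundle with
standalone: 'pillar.' + the file's basename). A minimal sketch,
assuming a common entry file templates.js that re-exports Nodes, and a
hypothetical nodeDoc node document:

    // Since tutti.js is always loaded, this works anywhere on the site:
    let $card = pillar.templates.Nodes.create$listItem(nodeDoc);
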
## JS Testing
* Added the Jest test framework for writing JavaScript tests (see the
  example below).
* `npm test` runs all the JavaScript tests.
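
A minimal test sketch: the test environment is jsdom, and jQuery is
made available globally by the setup file configured in
jest.config.js. The file name and location below are hypothetical; by
default Jest picks up *.test.js files and files under __tests__/.

    // src/scripts/js/es6/common/__tests__/environment.test.js (hypothetical)
    describe('test environment', () => {
        test('jQuery is available globally', () => {
            // global.$ and global.jQuery are set by test-env.js
            let $el = $('<div class="card">');
            expect($el.hasClass('card')).toBeTruthy();
        });
    });

A single test file can be run with `npx jest <path-to-test-file>`.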

Thanks to Sybren for reviewing
Tobias Johansson 2018-11-12 12:57:25 +01:00
parent e2432f6e9f
commit 2990738b5d
20 changed files with 1358 additions and 6 deletions

.babelrc (new file)

@@ -0,0 +1,3 @@
{
"presets": ["@babel/preset-env"]
}

gulpfile.js

@@ -13,6 +13,13 @@ let rename = require('gulp-rename');
let sass = require('gulp-sass');
let sourcemaps = require('gulp-sourcemaps');
let uglify = require('gulp-uglify-es').default;
let browserify = require('browserify');
let babelify = require('babelify');
let sourceStream = require('vinyl-source-stream');
let glob = require('glob');
let es = require('event-stream');
let path = require('path');
let buffer = require('vinyl-buffer');
let enabled = {
uglify: argv.production,
@@ -81,6 +88,41 @@ gulp.task('scripts', function(done) {
done();
});
function browserify_base(entry) {
return browserify({
entries: [entry],
standalone: 'pillar.' + path.basename(entry, '.js'),
})
.transform(babelify, { "presets": ["@babel/preset-env"] })
.bundle()
.pipe(gulpif(enabled.failCheck, plumber()))
.pipe(sourceStream(path.basename(entry)))
.pipe(buffer())
.pipe(rename({
extname: '.min.js'
}));
}
function browserify_common() {
return glob.sync('src/scripts/js/es6/common/*.js').map(browserify_base);
}
gulp.task('scripts_browserify', function(done) {
glob('src/scripts/js/es6/individual/*.js', function(err, files) {
if(err) done(err);
var tasks = files.map(function(entry) {
return browserify_base(entry)
.pipe(gulpif(enabled.maps, sourcemaps.init()))
.pipe(gulpif(enabled.uglify, uglify()))
.pipe(gulpif(enabled.maps, sourcemaps.write(".")))
.pipe(gulp.dest(destination.js));
});
es.merge(tasks).on('end', done);
})
});
/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js
* Since it's always loaded, it's only for functions that we want site-wide.
@@ -88,7 +130,7 @@ gulp.task('scripts', function(done) {
* the site doesn't work without it anyway.*/
gulp.task('scripts_concat_tutti', function(done) {
toUglify = [
let toUglify = [
source.jquery + 'dist/jquery.min.js',
source.popper + 'dist/umd/popper.min.js',
source.bootstrap + 'js/dist/index.js',
@@ -100,7 +142,7 @@ gulp.task('scripts_concat_tutti', function(done) {
'src/scripts/tutti/**/*.js'
];
gulp.src(toUglify)
es.merge(gulp.src(toUglify), ...browserify_common())
.pipe(gulpif(enabled.failCheck, plumber()))
.pipe(gulpif(enabled.maps, sourcemaps.init()))
.pipe(concat("tutti.min.js"))
@@ -137,7 +179,7 @@ gulp.task('watch',function(done) {
gulp.watch('src/templates/**/*.pug',gulp.series('templates'));
gulp.watch('src/scripts/*.js',gulp.series('scripts'));
gulp.watch('src/scripts/tutti/**/*.js',gulp.series('scripts_concat_tutti'));
gulp.watch('src/scripts/js/**/*.js',gulp.series(['scripts_browserify', 'scripts_concat_tutti']));
done();
});
@@ -167,4 +209,5 @@ gulp.task('default', gulp.parallel(tasks.concat([
'scripts',
'scripts_concat_tutti',
'scripts_move_vendor',
'scripts_browserify',
])));

jest.config.js (new file)

@@ -0,0 +1,180 @@
// For a detailed explanation regarding each configuration property, visit:
// https://jestjs.io/docs/en/configuration.html
module.exports = {
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after the first failure
// bail: false,
// Respect "browser" field in package.json when resolving modules
// browser: false,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/tmp/jest_rs",
// Automatically clear mock calls and instances between every test
clearMocks: true,
// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: false,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: null,
// The directory where Jest should output its coverage files
// coverageDirectory: null,
// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: null,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: null,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: null,
// A set of global variables that need to be available in all test environments
// globals: {},
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "json",
// "jsx",
// "node"
// ],
// A map from regular expressions to module names that allow to stub out resources with a single module
// moduleNameMapper: {},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "always",
// A preset that is used as a base for Jest's configuration
// preset: null,
// Run tests from one or more projects
// projects: null,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state between every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: null,
// Automatically restore mock state between every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: null,
// A list of paths to directories that Jest should use to search for files in
// roots: [
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
setupFiles: ["<rootDir>/src/scripts/js/es6/test_config/test-env.js"],
// The path to a module that runs some code to configure or set up the testing framework before each test
// setupTestFrameworkScriptFile: null,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
testEnvironment: "jsdom",
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
// testMatch: [
// "**/__tests__/**/*.js?(x)",
// "**/?(*.)+(spec|test).js?(x)"
// ],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [
// "/node_modules/"
// ],
// The regexp pattern Jest uses to detect test files
// testRegex: "",
// This option allows the use of a custom results processor
// testResultsProcessor: null,
// This option allows use of a custom test runner
// testRunner: "jasmine2",
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
// testURL: "http://localhost",
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
// timers: "real",
// A map from regular expressions to paths to transformers
// transform: null,
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "/node_modules/"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: null,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};

package.json

@@ -7,8 +7,15 @@
"url": "git://git.blender.org/pillar.git"
},
"devDependencies": {
"@babel/core": "^7.1.2",
"@babel/preset-env": "^7.1.0",
"acorn": "^6.0.2",
"babel-core": "^7.0.0-bridge.0",
"babelify": "^10.0.0",
"browserify": "^16.2.3",
"gulp": "^4.0.0",
"gulp-autoprefixer": "^6.0.0",
"gulp-babel": "^8.0.0",
"gulp-cached": "^1.1.1",
"gulp-chmod": "^2.0.0",
"gulp-concat": "^2.6.1",
@@ -21,12 +28,17 @@
"gulp-sass": "^4.0.1",
"gulp-sourcemaps": "^2.6.4",
"gulp-uglify-es": "^1.0.4",
"minimist": "^1.2.0"
"jest": "^23.6.0",
"minimist": "^1.2.0",
"vinyl-source-stream": "^2.0.0"
},
"dependencies": {
"bootstrap": "^4.1.3",
"jquery": "^3.3.1",
"popper.js": "^1.14.4",
"video.js": "^7.2.2"
},
"scripts": {
"test": "jest"
}
}

pillar/api/__init__.py

@@ -1,6 +1,6 @@
def setup_app(app):
from . import encoding, blender_id, projects, local_auth, file_storage
from . import users, nodes, latest, blender_cloud, service, activities
from . import users, nodes, latest, blender_cloud, service, activities, timeline
from . import organizations
from . import search
@@ -11,6 +11,7 @@ def setup_app(app):
local_auth.setup_app(app, url_prefix='/auth')
file_storage.setup_app(app, url_prefix='/storage')
latest.setup_app(app, url_prefix='/latest')
timeline.setup_app(app, url_prefix='/timeline')
blender_cloud.setup_app(app, url_prefix='/bcloud')
users.setup_app(app, api_prefix='/users')
service.setup_app(app, api_prefix='/service')

pillar/api/timeline.py (new file)

@@ -0,0 +1,360 @@
import itertools
import typing
from datetime import datetime
from operator import itemgetter
import attr
import bson
import pymongo
from flask import Blueprint, current_app, request, url_for
from pillar.api.utils import jsonify, pretty_duration, str2id
from pillar.web.utils import pretty_date
blueprint = Blueprint('timeline', __name__)
@attr.s(auto_attribs=True)
class TimelineDO:
groups: typing.List['GroupDO'] = []
continue_from: typing.Optional[float] = None
@attr.s(auto_attribs=True)
class GroupDO:
label: typing.Optional[str] = None
url: typing.Optional[str] = None
items: typing.Dict = {}
groups: typing.Iterable['GroupDO'] = []
class SearchHelper:
def __init__(self, nbr_of_weeks: int, continue_from: typing.Optional[datetime],
project_ids: typing.List[bson.ObjectId], sort_direction: str):
self._nbr_of_weeks = nbr_of_weeks
self._continue_from = continue_from
self._project_ids = project_ids
self.sort_direction = sort_direction
def _match(self, continue_from: typing.Optional[datetime]) -> dict:
created = {}
if continue_from:
if self.sort_direction == 'desc':
created = {'_created': {'$lt': continue_from}}
else:
created = {'_created': {'$gt': continue_from}}
return {'_deleted': {'$ne': True},
'node_type': {'$in': ['asset', 'post']},
'project': {'$in': self._project_ids},
**created,
}
def raw_weeks_from_mongo(self) -> pymongo.collection.Collection:
direction = pymongo.DESCENDING if self.sort_direction == 'desc' else pymongo.ASCENDING
nodes_coll = current_app.db('nodes')
return nodes_coll.aggregate([
{'$match': self._match(self._continue_from)},
{'$lookup': {"from": "projects",
"localField": "project",
"foreignField": "_id",
"as": "project"}},
{'$unwind': {'path': "$project"}},
{'$lookup': {"from": "users",
"localField": "user",
"foreignField": "_id",
"as": "user"}},
{'$unwind': {'path': "$user"}},
{'$project': {
'_created': 1,
'project._id': 1,
'project.url': 1,
'project.name': 1,
'user._id': 1,
'user.full_name': 1,
'name': 1,
'node_type': 1,
'picture': 1,
'properties': 1,
'permissions': 1,
}},
{'$group': {
'_id': {'year': {'$isoWeekYear': '$_created'},
'week': {'$isoWeek': '$_created'}},
'nodes': {'$push': '$$ROOT'}
}},
{'$sort': {'_id.year': direction,
'_id.week': direction}},
{'$limit': self._nbr_of_weeks}
])
def has_more(self, continue_from: datetime) -> bool:
nodes_coll = current_app.db('nodes')
result = nodes_coll.count(self._match(continue_from))
return bool(result)
class Grouper:
@classmethod
def label(cls, node):
return None
@classmethod
def url(cls, node):
return None
@classmethod
def group_key(cls) -> typing.Callable[[dict], typing.Any]:
raise NotImplementedError()
@classmethod
def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
raise NotImplementedError()
class ProjectGrouper(Grouper):
@classmethod
def label(cls, project: dict):
return project['name']
@classmethod
def url(cls, project: dict):
return url_for('projects.view', project_url=project['url'])
@classmethod
def group_key(cls) -> typing.Callable[[dict], typing.Any]:
return itemgetter('project')
@classmethod
def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
return lambda node: node['project']['_id']
class UserGrouper(Grouper):
@classmethod
def label(cls, user):
return user['full_name']
@classmethod
def group_key(cls) -> typing.Callable[[dict], typing.Any]:
return itemgetter('user')
@classmethod
def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
return lambda node: node['user']['_id']
class TimeLineBuilder:
def __init__(self, search_helper: SearchHelper, grouper: typing.Type[Grouper]):
self.search_helper = search_helper
self.grouper = grouper
self.continue_from = None
def build(self) -> TimelineDO:
raw_weeks = self.search_helper.raw_weeks_from_mongo()
clean_weeks = (self.create_week_group(week) for week in raw_weeks)
return TimelineDO(
groups=list(clean_weeks),
continue_from=self.continue_from.timestamp() if self.search_helper.has_more(self.continue_from) else None
)
def create_week_group(self, week: dict) -> GroupDO:
nodes = week['nodes']
nodes.sort(key=itemgetter('_created'), reverse=True)
self.update_continue_from(nodes)
groups = self.create_groups(nodes)
return GroupDO(
label=f'Week {week["_id"]["week"]}, {week["_id"]["year"]}',
groups=groups
)
def create_groups(self, nodes: typing.List[dict]) -> typing.List[GroupDO]:
self.sort_nodes(nodes) # groupby assumes that the list is sorted
nodes_grouped = itertools.groupby(nodes, self.grouper.group_key())
groups = (self.clean_group(grouped_by, group) for grouped_by, group in nodes_grouped)
groups_sorted = sorted(groups, key=self.group_row_sorter, reverse=True)
return groups_sorted
def sort_nodes(self, nodes: typing.List[dict]):
nodes.sort(key=itemgetter('node_type'))
nodes.sort(key=self.grouper.sort_key())
def update_continue_from(self, sorted_nodes: typing.List[dict]):
if self.search_helper.sort_direction == 'desc':
first_created = sorted_nodes[-1]['_created']
candidate = self.continue_from or first_created
self.continue_from = min(candidate, first_created)
else:
last_created = sorted_nodes[0]['_created']
candidate = self.continue_from or last_created
self.continue_from = max(candidate, last_created)
def clean_group(self, grouped_by: typing.Any, group: typing.Iterable[dict]) -> GroupDO:
items = self.create_items(group)
return GroupDO(
label=self.grouper.label(grouped_by),
url=self.grouper.url(grouped_by),
items=items
)
def create_items(self, group) -> typing.Dict[str, typing.List[dict]]:
by_node_type = itertools.groupby(group, key=itemgetter('node_type'))
items = {}
for node_type, nodes in by_node_type:
items[node_type] = [self.node_prettyfy(n) for n in nodes]
return items
@classmethod
def node_prettyfy(cls, node: dict)-> dict:
node['pretty_created'] = pretty_date(node['_created'])
duration_seconds = node['properties'].get('duration_seconds')
if duration_seconds is not None:
node['properties']['duration'] = pretty_duration(duration_seconds)
return node
@classmethod
def group_row_sorter(cls, row: GroupDO) -> typing.Tuple[datetime, datetime]:
'''
Groups that contain posts are more interesting and are therefore sorted higher up.
:param row:
:return: tuple with newest post date and newest asset date
'''
def newest_created(nodes: typing.List[dict]) -> datetime:
if nodes:
return nodes[0]['_created']
return datetime.fromtimestamp(0, tz=bson.tz_util.utc)
newest_post_date = newest_created(row.items.get('post'))
newest_asset_date = newest_created(row.items.get('asset'))
return newest_post_date, newest_asset_date
def _public_project_ids() -> typing.List[bson.ObjectId]:
"""Returns a list of ObjectIDs of public projects.
Memoized in setup_app().
"""
proj_coll = current_app.db('projects')
result = proj_coll.find({'is_private': False}, {'_id': 1})
return [p['_id'] for p in result]
@blueprint.route('/', methods=['GET'])
def global_timeline():
continue_from_str = request.args.get('from')
continue_from = parse_continue_from(continue_from_str)
nbr_of_weeks_str = request.args.get('weeksToLoad')
nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
sort_direction = request.args.get('dir', 'desc')
return _global_timeline(continue_from, nbr_of_weeks, sort_direction)
@blueprint.route('/p/<string(length=24):pid_path>', methods=['GET'])
def project_timeline(pid_path: str):
continue_from_str = request.args.get('from')
continue_from = parse_continue_from(continue_from_str)
nbr_of_weeks_str = request.args.get('weeksToLoad')
nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
sort_direction = request.args.get('dir', 'desc')
pid = str2id(pid_path)
return _project_timeline(continue_from, nbr_of_weeks, sort_direction, pid)
def parse_continue_from(from_arg) -> typing.Optional[datetime]:
try:
from_float = float(from_arg)
except (TypeError, ValueError):
return None
return datetime.fromtimestamp(from_float, tz=bson.tz_util.utc)
def parse_nbr_of_weeks(weeks_to_load: str) -> int:
try:
return int(weeks_to_load)
except (TypeError, ValueError):
return 3
def _global_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction: str):
"""Returns an aggregated view of what has happened on the site
Memoized in setup_app().
:param continue_from: Python utc timestamp where to begin aggregation
:param nbr_of_weeks: Number of weeks to return
Example output:
{
groups: [{
label: 'Week 32',
groups: [{
label: 'Spring',
url: '/p/spring',
items:{
post: [blogPostDoc, blogPostDoc],
asset: [assetDoc, assetDoc]
},
groups: ...
}]
}],
continue_from: 123456.2 // python timestamp
}
"""
builder = TimeLineBuilder(
SearchHelper(nbr_of_weeks, continue_from, _public_project_ids(), sort_direction),
ProjectGrouper
)
return jsonify_timeline(builder.build())
def jsonify_timeline(timeline: TimelineDO):
return jsonify(
attr.asdict(timeline,
recurse=True,
filter=lambda att, value: value is not None)
)
def _project_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction, pid: bson.ObjectId):
"""Returns an aggregated view of what has happened on the site
Memoized in setup_app().
:param continue_from: Python utc timestamp where to begin aggregation
:param nbr_of_weeks: Number of weeks to return
Example output:
{
groups: [{
label: 'Week 32',
groups: [{
label: 'Tobias Johansson',
items:{
post: [blogPostDoc, blogPostDoc],
asset: [assetDoc, assetDoc]
},
groups: ...
}]
}],
continue_from: 123456.2 // python timestamp
}
"""
builder = TimeLineBuilder(
SearchHelper(nbr_of_weeks, continue_from, [pid], sort_direction),
UserGrouper
)
return jsonify_timeline(builder.build())
def setup_app(app, url_prefix):
global _public_project_ids
global _global_timeline
global _project_timeline
app.register_api_blueprint(blueprint, url_prefix=url_prefix)
cached = app.cache.cached(timeout=3600)
_public_project_ids = cached(_public_project_ids)
memoize = app.cache.memoize(timeout=60)
_global_timeline = memoize(_global_timeline)
_project_timeline = memoize(_project_timeline)

requirements.txt

@@ -1,7 +1,7 @@
# Primary requirements
-r ../pillar-python-sdk/requirements.txt
attrs==16.2.0
attrs==18.2.0
algoliasearch==1.12.0
bcrypt==3.1.3
blinker==1.4

src/scripts/js/es6/common/templates.js (new file)

@@ -0,0 +1 @@
export { Nodes } from './templates/templates'

src/scripts/js/es6/common/templates/__tests__/assets.test.js (new file)

@@ -0,0 +1,115 @@
import { Assets } from '../assets'
jest.useFakeTimers();
describe('Assets', () => {
describe('create$listItem', () => {
let nodeDoc;
let spyGet;
beforeEach(()=>{
nodeDoc = {
_id: 'my-asset-id',
name: 'My Asset',
pretty_created: '2 hours ago',
node_type: 'asset',
project: {
name: 'My Project',
url: 'url-to-project'
},
properties: {
content_type: 'image'
}
};
spyGet = spyOn($, 'get').and.callFake(function(url) {
let ajaxMock = $.Deferred();
let response = {
variations: [{
size: 'l',
link: 'wrong-img-link',
width: 150,
height: 170,
},{
size: 'm',
link: 'img-link',
width: 50,
height: 70,
},{
size: 's',
link: 'wrong-img-link',
width: 5,
height: 7,
}]
}
ajaxMock.resolve(response);
return ajaxMock.promise();
});
});
describe('image content', () => {
test('node with picture', done => {
nodeDoc.picture = 'picture_id';
let $card = Assets.create$listItem(nodeDoc);
jest.runAllTimers();
expect($card.length).toEqual(1);
expect($card.prop('tagName')).toEqual('A');
expect($card.hasClass('card asset')).toBeTruthy();
expect($card.attr('href')).toEqual('/nodes/my-asset-id/redir');
expect($card.attr('title')).toEqual('My Asset');
let $body = $card.find('.card-body');
expect($body.length).toEqual(1);
let $title = $body.find('.card-title');
expect($title.length).toEqual(1);
expect(spyGet).toHaveBeenCalledTimes(1);
expect(spyGet).toHaveBeenLastCalledWith('/api/files/picture_id');
let $image = $card.find('img');
expect($image.length).toEqual(1);
let $imageSubsititure = $card.find('.pi-asset');
expect($imageSubsititure.length).toEqual(0);
let $progress = $card.find('.progress');
expect($progress.length).toEqual(0);
let $watched = $card.find('.card-label');
expect($watched.length).toEqual(0);
done();
});
test('node without picture', done => {
let $card = Assets.create$listItem(nodeDoc);
expect($card.length).toEqual(1);
expect($card.prop('tagName')).toEqual('A');
expect($card.hasClass('card asset')).toBeTruthy();
expect($card.attr('href')).toEqual('/nodes/my-asset-id/redir');
expect($card.attr('title')).toEqual('My Asset');
let $body = $card.find('.card-body');
expect($body.length).toEqual(1);
let $title = $body.find('.card-title');
expect($title.length).toEqual(1);
expect(spyGet).toHaveBeenCalledTimes(0);
let $image = $card.find('img');
expect($image.length).toEqual(0);
let $imageSubsititure = $card.find('.pi-asset');
expect($imageSubsititure.length).toEqual(1);
let $progress = $card.find('.progress');
expect($progress.length).toEqual(0);
let $watched = $card.find('.card-label');
expect($watched.length).toEqual(0);
done();
});
});
})
});

src/scripts/js/es6/common/templates/assets.js (new file)

@@ -0,0 +1,97 @@
import { NodesFactoryInterface } from './nodes'
import { thenLoadImage, thenLoadVideoProgress } from './utils';
class Assets extends NodesFactoryInterface{
static create$listItem(node) {
var markIfPublic = true;
let $card = $('<a class="card asset card-image-fade pr-0 mx-0 mb-2">')
.addClass('js-tagged-asset')
.attr('href', '/nodes/' + node._id + '/redir')
.attr('title', node.name);
let $thumbnailContainer = $('<div class="embed-responsive embed-responsive-16by9">');
function warnNoPicture() {
let $cardIcon = $('<div class="card-img-top card-icon embed-responsive-item">');
$cardIcon.html('<i class="pi-' + node.node_type + '">');
$thumbnailContainer.append($cardIcon);
}
if (!node.picture) {
warnNoPicture();
} else {
$(window).trigger('pillar:workStart');
thenLoadImage(node.picture)
.fail(warnNoPicture)
.then((imgVariation)=>{
let img = $('<img class="card-img-top embed-responsive-item">')
.attr('alt', node.name)
.attr('src', imgVariation.link)
.attr('width', imgVariation.width)
.attr('height', imgVariation.height);
$thumbnailContainer.append(img);
})
.always(function(){
$(window).trigger('pillar:workStop');
});
}
$card.append($thumbnailContainer);
/* Card body for title and meta info. */
let $cardBody = $('<div class="card-body py-2 d-flex flex-column">');
let $cardTitle = $('<div class="card-title mb-1 font-weight-bold">');
$cardTitle.text(node.name);
$cardBody.append($cardTitle);
let $cardMeta = $('<ul class="card-text list-unstyled d-flex text-black-50 mt-auto">');
let $cardProject = $('<a class="font-weight-bold pr-2">')
.attr('href', '/p/' + node.project.url)
.attr('title', node.project.name)
.text(node.project.name);
$cardMeta.append($cardProject);
$cardMeta.append('<li>' + node.pretty_created + '</li>');
$cardBody.append($cardMeta);
if (node.properties.duration){
let $cardDuration = $('<div class="card-label right">' + node.properties.duration + '</div>');
$thumbnailContainer.append($cardDuration);
/* Video progress and 'watched' label. */
$(window).trigger('pillar:workStart');
thenLoadVideoProgress(node._id)
.fail(console.log)
.then((view_progress)=>{
if (!view_progress) return
let $cardProgress = $('<div class="progress rounded-0">');
let $cardProgressBar = $('<div class="progress-bar">');
$cardProgressBar.css('width', view_progress.progress_in_percent + '%');
$cardProgress.append($cardProgressBar);
$thumbnailContainer.append($cardProgress);
if (view_progress.done){
let card_progress_done = $('<div class="card-label">WATCHED</div>');
$thumbnailContainer.append(card_progress_done);
}
})
.always(function() {
$(window).trigger('pillar:workStop');
});
}
/* 'Free' ribbon for public assets. */
if (markIfPublic && node.permissions && node.permissions.world){
$card.addClass('free');
}
$card.append($cardBody);
return $card;
}
}
export { Assets };

src/scripts/js/es6/common/templates/nodes.js (new file)

@@ -0,0 +1,48 @@
let CREATE_NODE_ITEM_MAP = {}
class Nodes {
static create$listItem(node) {
return CREATE_NODE_ITEM_MAP[node.node_type].create$listItem(node);
}
static create$item(node) {
return CREATE_NODE_ITEM_MAP[node.node_type].create$item(node);
}
static createListOf$nodeItems(nodes, initial=8, loadNext=8) {
let nodesLeftToRender = nodes.slice();
let nodesToCreate = nodesLeftToRender.splice(0, initial);
let listOf$items = nodesToCreate.map(Nodes.create$listItem);
if (loadNext > 0 && nodesLeftToRender.length) {
let $link = $('<a>')
.addClass('btn btn-outline-primary px-5 mb-auto btn-block js-load-next')
.attr('href', 'javascript:void(0);')
.click((e)=> {
let $target = $(e.target);
$target.replaceWith(Nodes.createListOf$nodeItems(nodesLeftToRender, loadNext, loadNext));
})
.text('Load More');
listOf$items.push($link);
}
return listOf$items;
}
static registerTemplate(key, klass) {
CREATE_NODE_ITEM_MAP[key] = klass;
}
}
class NodesFactoryInterface{
static create$listItem(node) {
throw 'Not Implemented'
}
static create$item(node) {
throw 'Not Implemented'
}
}
export { Nodes, NodesFactoryInterface };

src/scripts/js/es6/common/templates/posts.js (new file)

@@ -0,0 +1,52 @@
import { NodesFactoryInterface } from './nodes'
import { thenLoadImage } from './utils';
class Posts extends NodesFactoryInterface {
static create$item(post) {
let content = [];
let $title = $('<div>')
.addClass('display-4 text-uppercase font-weight-bold')
.text(post.name);
content.push($title);
let $text = $('<div>')
.addClass('lead')
.text(post['pretty_created']);
content.push($text);
let $jumbotron = $('<a>')
.addClass('jumbotron text-white jumbotron-overlay')
.attr('href', '/nodes/' + post._id + '/redir')
.append(
$('<div>')
.addClass('container')
.append(
$('<div>')
.addClass('row')
.append(
$('<div>')
.addClass('col-md-9')
.append(content)
)
)
);
thenLoadImage(post.picture, 'l')
.then((img)=>{
$jumbotron.attr('style', 'background-image: url(' + img.link + ');')
})
.fail((error)=>{
let msg = xhrErrorResponseMessage(error);
console.log(msg || error);
})
let $post = $('<div>')
.addClass('expand-image-links imgs-fluid')
.append(
$jumbotron,
$('<div>')
.addClass('node-details-description mx-auto py-5')
.html(post['properties']['_content_html'])
);
return $post;
}
}
export { Posts };

src/scripts/js/es6/common/templates/templates.js (new file)

@@ -0,0 +1,8 @@
import { Nodes } from './nodes';
import { Assets } from './assets';
import { Posts } from './posts';
Nodes.registerTemplate('asset', Assets);
Nodes.registerTemplate('post', Posts);
export { Nodes };

src/scripts/js/es6/common/templates/utils.js (new file)

@@ -0,0 +1,24 @@
function thenLoadImage(imgId, size = 'm') {
return $.get('/api/files/' + imgId)
.then((resp)=> {
var show_variation = null;
if (typeof resp.variations != 'undefined') {
for (var variation of resp.variations) {
if (variation.size != size) continue;
show_variation = variation;
break;
}
}
if (show_variation == null) {
throw 'Image not found: ' + imgId + ' size: ' + size;
}
return show_variation;
})
}
function thenLoadVideoProgress(nodeId) {
return $.get('/api/users/video/' + nodeId + '/progress')
}
export { thenLoadImage, thenLoadVideoProgress };

src/scripts/js/es6/individual/timeline.js (new file)

@@ -0,0 +1 @@
export { Timeline } from './timeline/timeline';

src/scripts/js/es6/individual/timeline/timeline.js (new file)

@@ -0,0 +1,180 @@
/**
* Consumes data in the form:
* {
* groups: [{
* label: 'Week 32',
* url: null, // optional
* groups: [{
* label: 'Spring',
* url: '/p/spring',
* items:{
* post: [nodeDoc, nodeDoc], // primary (fully rendered)
* asset: [nodeDoc, nodeDoc] // secondary (rendered as list item)
* },
* groups: ...
* }]
* }],
* continue_from: 123456.2 // python timestamp
* }
*/
class Timeline {
constructor(target, params, builder) {
this._$targetDom = $(target)
this._url = params['url'];
this._queryParams = params['queryParams'] || {};
this._builder = builder;
this._init();
}
_init() {
this._workStart();
this._thenLoadMore()
.then((it)=>{
this._$targetDom.empty();
this._$targetDom.append(it);
if (this._hasMore()) {
let btn = this._create$LoadMoreBtn();
this._$targetDom.append(btn);
}
})
.always(this._workStop.bind(this));
}
_loadMore(event) {
let $spinner = $('<i>').addClass('pi-spin spinner');
let $loadmoreBtn = $(event.target)
.append($spinner)
.addClass('disabled');
this._workStart();
this._thenLoadMore()
.then((it)=>{
$loadmoreBtn.before(it);
})
.always(()=>{
if (this._hasMore()) {
$loadmoreBtn.removeClass('disabled');
$spinner.remove();
} else {
$loadmoreBtn.remove();
}
this._workStop();
});
}
_hasMore() {
return !!this._queryParams['from'];
}
_thenLoadMore() {
this._workStart();
let qParams = $.param(this._queryParams);
return $.getJSON(this._url + '?' + qParams)
.then(this._render.bind(this))
.fail(this._workFailed.bind(this))
.always(this._workStop.bind(this))
}
_render(toRender) {
this._queryParams['from'] = toRender['continue_from'];
return toRender['groups']
.map(this._create$Group.bind(this));
}
_create$Group(group) {
return this._builder.build$Group(0, group);
}
_create$LoadMoreBtn() {
return $('<a>')
.addClass('btn btn-outline-primary js-load-next')
.attr('href', 'javascript:void(0);')
.click(this._loadMore.bind(this))
.text('Load More Weeks');
}
_workStart() {
this._$targetDom.trigger('pillar:workStart');
return arguments;
}
_workStop() {
this._$targetDom.trigger('pillar:workStop');
return arguments;
}
_workFailed(error) {
let msg = xhrErrorResponseMessage(error);
this._$targetDom.trigger('pillar:failure', msg);
return error;
}
}
class GroupBuilder {
build$Group(level, group) {
let content = []
let $label = this._create$Label(level, group['label'], group['url']);
if (group['items']) {
content = content.concat(this._create$Items(group['items']));
}
if(group['groups']) {
content = content.concat(group['groups'].map(this.build$Group.bind(this, level+1)));
}
return $('<div>')
.append(
$label,
content
);
}
_create$Items(items) {
let content = [];
let primaryNodes = items['post'];
let secondaryNodes = items['asset'];
if (primaryNodes) {
content.push(
$('<div>')
.append(primaryNodes.map(pillar.templates.Nodes.create$item))
);
}
if (secondaryNodes) {
content.push(
$('<div>')
.addClass('card-deck card-padless card-deck-responsive card-undefined-columns js-asset-list py-3')
.append(pillar.templates.Nodes.createListOf$nodeItems(secondaryNodes))
);
}
return content;
}
_create$Label(level, label, url) {
let size = level == 0 ? 'h5' : 'h6'
if (url) {
return $('<div>')
.addClass(size +' sticky-top')
.append(
$('<a>')
.addClass('text-muted')
.attr('href', url)
.text(label)
);
}
return $('<div>')
.addClass(size + ' text-muted sticky-top')
.text(label);
}
}
$.fn.extend({
timeline: function(params) {
this.each(function(i, target) {
new Timeline(target,
params || {},
new GroupBuilder()
);
});
}
})
export { Timeline };

src/scripts/js/es6/test_config/test-env.js (new file)

@@ -0,0 +1,2 @@
import $ from 'jquery';
global.$ = global.jQuery = $;

components/_timeline.sass (new file)

@@ -0,0 +1,24 @@
.timeline
.jumbotron
padding-top: 6em
padding-bottom: 6em
*
font-size: $h1-font-size
.lead
font-size: $font-size-base
.h5
text-align: right
background: $body-bg
opacity: 0.8
margin-right: -15px
margin-left: -15px
padding-right: 15px
.sticky-top
top: 2.5rem
body.is-mobile
.timeline
.js-asset-list
@extend .card-deck-vertical

main Sass file (component imports)

@@ -57,6 +57,7 @@
@import "components/shortcode"
@import "components/statusbar"
@import "components/search"
@import "components/timeline"
@import "components/flyout"
@import "components/forms"

Python tests for the timeline API (new file)

@@ -0,0 +1,200 @@
from datetime import timedelta, datetime
import bson
import flask
from pillar.tests import AbstractPillarTest
class GlobalTimelineTest(AbstractPillarTest):
def setUp(self, **kwargs):
super().setUp(**kwargs)
self.pid1, _ = self.ensure_project_exists()
self.pid2, _ = self.ensure_project_exists(project_overrides={
'name': 'Another Project',
'url': 'another-url',
'_id': bson.ObjectId('8572beecc0261b2005ed1a85'),
})
self.private_pid, _ = self.ensure_project_exists(project_overrides={
'_id': '5672beecc0261b2005ed1a34',
'is_private': True,
})
self.file_id, _ = self.ensure_file_exists(file_overrides={
'variations': [
{'format': 'mp4',
'duration': 3661 # 01:01:01
},
],
})
self.uid = self.create_user()
self.fake_now = datetime.fromtimestamp(1521540308.0, tz=bson.tz_util.utc) # A Tuesday
self.all_asset_pid1_ids = [str(self.create_asset(self.pid1, i, 0)) for i in range(25)]
self.all_asset_pid2_ids = [str(self.create_asset(self.pid2, i, 1)) for i in range(25)]
self.all_asset_private_pid_ids = [str(self.create_asset(self.private_pid, i, 2)) for i in range(25)]
self.all_post_pid1_ids = [str(self.create_post(self.pid1, i, 3)) for i in range(25)]
self.all_post_pid2_ids = [str(self.create_post(self.pid2, i, 4)) for i in range(25)]
self.all_post_private_pid_ids = [str(self.create_post(self.private_pid, i, 5)) for i in range(25)]
def test_timeline_latest(self):
with self.app.app_context():
url = flask.url_for('timeline.global_timeline')
response = self.get(url).json
timeline = response['groups']
continue_from = response['continue_from']
self.assertEquals(1520229908.0, continue_from)
self.assertEquals(3, len(timeline))
self.assertEquals('Week 11, 2018', timeline[1]['label'])
self.assertEquals('Week 10, 2018', timeline[2]['label'])
self.assertEquals('Unittest project', timeline[0]['groups'][0]['label'])
self.assertEquals('Another Project', timeline[0]['groups'][1]['label'])
self.assertEquals('/p/default-project/', timeline[0]['groups'][0]['url'])
self.assertEquals('/p/another-url/', timeline[0]['groups'][1]['url'])
# week 12
week = timeline[0]
self.assertEquals('Week 12, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[0:2]
expected_asset_ids = self.all_asset_pid1_ids[0:2]
self.assertProjectEquals(proj_pid1, 'Unittest project', '/p/default-project/',
expected_post_ids, expected_asset_ids)
proj_pid2 = week['groups'][1]
expected_post_ids = self.all_post_pid2_ids[0:2]
expected_asset_ids = self.all_asset_pid2_ids[0:2]
self.assertProjectEquals(proj_pid2, 'Another Project', '/p/another-url/',
expected_post_ids, expected_asset_ids)
# week 11
week = timeline[1]
self.assertEquals('Week 11, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[2:9]
expected_asset_ids = self.all_asset_pid1_ids[2:9]
self.assertProjectEquals(proj_pid1, 'Unittest project', '/p/default-project/',
expected_post_ids, expected_asset_ids)
proj_pid2 = week['groups'][1]
expected_post_ids = self.all_post_pid2_ids[2:9]
expected_asset_ids = self.all_asset_pid2_ids[2:9]
self.assertProjectEquals(proj_pid2, 'Another Project', '/p/another-url/',
expected_post_ids, expected_asset_ids)
# week 10
week = timeline[2]
self.assertEquals('Week 10, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[9:16]
expected_asset_ids = self.all_asset_pid1_ids[9:16]
self.assertProjectEquals(proj_pid1, 'Unittest project', '/p/default-project/',
expected_post_ids, expected_asset_ids)
proj_pid2 = week['groups'][1]
expected_post_ids = self.all_post_pid2_ids[9:16]
expected_asset_ids = self.all_asset_pid2_ids[9:16]
self.assertProjectEquals(proj_pid2, 'Another Project', '/p/another-url/',
expected_post_ids, expected_asset_ids)
def test_timeline_continue_from(self):
with self.app.app_context():
url = flask.url_for('timeline.global_timeline')
response = self.get(url + '?from=1520229908.0').json
timeline = response['groups']
self.assertNotIn('continue_from', response)
self.assertEquals(2, len(timeline))
self.assertEquals('Week 9, 2018', timeline[0]['label'])
self.assertEquals('Week 8, 2018', timeline[1]['label'])
self.assertEquals('Unittest project', timeline[0]['groups'][0]['label'])
self.assertEquals('Another Project', timeline[0]['groups'][1]['label'])
self.assertEquals('/p/default-project/', timeline[0]['groups'][0]['url'])
# week 9
week = timeline[0]
self.assertEquals('Week 9, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[16:23]
expected_asset_ids = self.all_asset_pid1_ids[16:23]
self.assertProjectEquals(proj_pid1, 'Unittest project', '/p/default-project/',
expected_post_ids, expected_asset_ids)
proj_pid2 = week['groups'][1]
expected_post_ids = self.all_post_pid2_ids[16:23]
expected_asset_ids = self.all_asset_pid2_ids[16:23]
self.assertProjectEquals(proj_pid2, 'Another Project', '/p/another-url/',
expected_post_ids, expected_asset_ids)
# week 8
week = timeline[1]
self.assertEquals('Week 8, 2018', week['label'])
proj_pid1 = week['groups'][0]
expected_post_ids = self.all_post_pid1_ids[23:25]
expected_asset_ids = self.all_asset_pid1_ids[23:25]
self.assertProjectEquals(proj_pid1, 'Unittest project', '/p/default-project/',
expected_post_ids, expected_asset_ids)
proj_pid2 = week['groups'][1]
expected_post_ids = self.all_post_pid2_ids[23:25]
expected_asset_ids = self.all_asset_pid2_ids[23:25]
self.assertProjectEquals(proj_pid2, 'Another Project', '/p/another-url/',
expected_post_ids, expected_asset_ids)
def assertProjectEquals(self, proj, label, url, expected_post_ids, expected_asset_ids):
self.assertEquals(label, proj['label'])
self.assertEquals(url, proj['url'])
actual_ids = [n['_id'] for n in proj['items']['post']]
self.assertEquals(expected_post_ids, actual_ids)
actual_ids = [n['_id'] for n in proj['items']['asset']]
self.assertEquals(expected_asset_ids, actual_ids)
def create_asset(self, pid, days, hours):
asset_node = {
'name': 'Just a node name',
'description': '',
'node_type': 'asset',
'user': self.uid,
}
asset_props = {
'status': 'published',
'file': self.file_id,
'content_type': 'video',
'order': 0
}
return self.create_node({
**asset_node,
'project': pid,
'_created': self.fake_now - timedelta(days=days, hours=hours),
'properties': asset_props,
})
def create_post(self, pid, days, hours):
post_node = {
'name': 'Just a node name',
'description': '',
'node_type': 'post',
'user': self.uid,
}
post_props = {
'status': 'published',
'content': 'blablabla',
'order': 0
}
return self.create_node({
**post_node,
'project': pid,
'_created': self.fake_now - timedelta(days=days, hours=hours),
'properties': post_props,
})