diff --git a/.babelrc b/.babelrc new file mode 100644 index 00000000..ff3059c3 --- /dev/null +++ b/.babelrc @@ -0,0 +1,3 @@ +{ + "presets": ["@babel/preset-env"] +} \ No newline at end of file diff --git a/gulpfile.js b/gulpfile.js index 0791eb4a..6c8633cd 100644 --- a/gulpfile.js +++ b/gulpfile.js @@ -13,6 +13,13 @@ let rename = require('gulp-rename'); let sass = require('gulp-sass'); let sourcemaps = require('gulp-sourcemaps'); let uglify = require('gulp-uglify-es').default; +let browserify = require('browserify'); +let babelify = require('babelify'); +let sourceStream = require('vinyl-source-stream'); +let glob = require('glob'); +let es = require('event-stream'); +let path = require('path'); +let buffer = require('vinyl-buffer'); let enabled = { uglify: argv.production, @@ -81,6 +88,41 @@ gulp.task('scripts', function(done) { done(); }); +function browserify_base(entry) { + return browserify({ + entries: [entry], + standalone: 'pillar.' + path.basename(entry, '.js'), + }) + .transform(babelify, { "presets": ["@babel/preset-env"] }) + .bundle() + .pipe(gulpif(enabled.failCheck, plumber())) + .pipe(sourceStream(path.basename(entry))) + .pipe(buffer()) + .pipe(rename({ + extname: '.min.js' + })); +} + +function browserify_common() { + return glob.sync('src/scripts/js/es6/common/*.js').map(browserify_base); +} + +gulp.task('scripts_browserify', function(done) { + glob('src/scripts/js/es6/individual/*.js', function(err, files) { + if(err) done(err); + + var tasks = files.map(function(entry) { + return browserify_base(entry) + .pipe(gulpif(enabled.maps, sourcemaps.init())) + .pipe(gulpif(enabled.uglify, uglify())) + .pipe(gulpif(enabled.maps, sourcemaps.write("."))) + .pipe(gulp.dest(destination.js)); + }); + + es.merge(tasks).on('end', done); + }) +}); + /* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js * Since it's always loaded, it's only for functions that we want site-wide. 
@@ -88,7 +130,7 @@ gulp.task('scripts', function(done) { * the site doesn't work without it anyway.*/ gulp.task('scripts_concat_tutti', function(done) { - toUglify = [ + let toUglify = [ source.jquery + 'dist/jquery.min.js', source.popper + 'dist/umd/popper.min.js', source.bootstrap + 'js/dist/index.js', @@ -100,7 +142,7 @@ gulp.task('scripts_concat_tutti', function(done) { 'src/scripts/tutti/**/*.js' ]; - gulp.src(toUglify) + es.merge(gulp.src(toUglify), ...browserify_common()) .pipe(gulpif(enabled.failCheck, plumber())) .pipe(gulpif(enabled.maps, sourcemaps.init())) .pipe(concat("tutti.min.js")) @@ -137,7 +179,7 @@ gulp.task('watch',function(done) { gulp.watch('src/templates/**/*.pug',gulp.series('templates')); gulp.watch('src/scripts/*.js',gulp.series('scripts')); gulp.watch('src/scripts/tutti/**/*.js',gulp.series('scripts_concat_tutti')); - + gulp.watch('src/scripts/js/**/*.js',gulp.series(['scripts_browserify', 'scripts_concat_tutti'])); done(); }); @@ -167,4 +209,5 @@ gulp.task('default', gulp.parallel(tasks.concat([ 'scripts', 'scripts_concat_tutti', 'scripts_move_vendor', + 'scripts_browserify', ]))); diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 00000000..07214bf5 --- /dev/null +++ b/jest.config.js @@ -0,0 +1,180 @@ +// For a detailed explanation regarding each configuration property, visit: +// https://jestjs.io/docs/en/configuration.html + +module.exports = { + // All imported modules in your tests should be mocked automatically + // automock: false, + + // Stop running tests after the first failure + // bail: false, + + // Respect "browser" field in package.json when resolving modules + // browser: false, + + // The directory where Jest should store its cached dependency information + // cacheDirectory: "/tmp/jest_rs", + + // Automatically clear mock calls and instances between every test + clearMocks: true, + + // Indicates whether the coverage information should be collected while executing the test + // collectCoverage: 
false, + + // An array of glob patterns indicating a set of files for which coverage information should be collected + // collectCoverageFrom: null, + + // The directory where Jest should output its coverage files + // coverageDirectory: null, + + // An array of regexp pattern strings used to skip coverage collection + // coveragePathIgnorePatterns: [ + // "/node_modules/" + // ], + + // A list of reporter names that Jest uses when writing coverage reports + // coverageReporters: [ + // "json", + // "text", + // "lcov", + // "clover" + // ], + + // An object that configures minimum threshold enforcement for coverage results + // coverageThreshold: null, + + // Make calling deprecated APIs throw helpful error messages + // errorOnDeprecated: false, + + // Force coverage collection from ignored files using an array of glob patterns + // forceCoverageMatch: [], + + // A path to a module which exports an async function that is triggered once before all test suites + // globalSetup: null, + + // A path to a module which exports an async function that is triggered once after all test suites + // globalTeardown: null, + + // A set of global variables that need to be available in all test environments + // globals: {}, + + // An array of directory names to be searched recursively up from the requiring module's location + // moduleDirectories: [ + // "node_modules" + // ], + + // An array of file extensions your modules use + // moduleFileExtensions: [ + // "js", + // "json", + // "jsx", + // "node" + // ], + + // A map from regular expressions to module names that allow to stub out resources with a single module + // moduleNameMapper: {}, + + // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader + // modulePathIgnorePatterns: [], + + // Activates notifications for test results + // notify: false, + + // An enum that specifies notification mode.
Requires { notify: true } + // notifyMode: "always", + + // A preset that is used as a base for Jest's configuration + // preset: null, + + // Run tests from one or more projects + // projects: null, + + // Use this configuration option to add custom reporters to Jest + // reporters: undefined, + + // Automatically reset mock state between every test + // resetMocks: false, + + // Reset the module registry before running each individual test + // resetModules: false, + + // A path to a custom resolver + // resolver: null, + + // Automatically restore mock state between every test + // restoreMocks: false, + + // The root directory that Jest should scan for tests and modules within + // rootDir: null, + + // A list of paths to directories that Jest should use to search for files in + // roots: [ + // "" + // ], + + // Allows you to use a custom runner instead of Jest's default test runner + // runner: "jest-runner", + + // The paths to modules that run some code to configure or set up the testing environment before each test + setupFiles: ["/src/scripts/js/es6/test_config/test-env.js"], + + // The path to a module that runs some code to configure or set up the testing framework before each test + // setupTestFrameworkScriptFile: null, + + // A list of paths to snapshot serializer modules Jest should use for snapshot testing + // snapshotSerializers: [], + + // The test environment that will be used for testing + testEnvironment: "jsdom", + + // Options that will be passed to the testEnvironment + // testEnvironmentOptions: {}, + + // Adds a location field to test results + // testLocationInResults: false, + + // The glob patterns Jest uses to detect test files + // testMatch: [ + // "**/__tests__/**/*.js?(x)", + // "**/?(*.)+(spec|test).js?(x)" + // ], + + // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped + // testPathIgnorePatterns: [ + // "/node_modules/" + // ], + + // The regexp pattern Jest uses to detect 
test files + // testRegex: "", + + // This option allows the use of a custom results processor + // testResultsProcessor: null, + + // This option allows use of a custom test runner + // testRunner: "jasmine2", + + // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href + // testURL: "http://localhost", + + // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" + // timers: "real", + + // A map from regular expressions to paths to transformers + // transform: null, + + // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation + // transformIgnorePatterns: [ + // "/node_modules/" + // ], + + // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them + // unmockedModulePathPatterns: undefined, + + // Indicates whether each individual test should be reported during the run + // verbose: null, + + // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode + // watchPathIgnorePatterns: [], + + // Whether to use watchman for file crawling + // watchman: true, +}; diff --git a/package.json b/package.json index 6be48d61..f6fe4d22 100644 --- a/package.json +++ b/package.json @@ -7,8 +7,15 @@ "url": "git://git.blender.org/pillar.git" }, "devDependencies": { + "@babel/core": "^7.1.2", + "@babel/preset-env": "^7.1.0", + "acorn": "^6.0.2", + "babel-core": "^7.0.0-bridge.0", + "babelify": "^10.0.0", + "browserify": "^16.2.3", "gulp": "^4.0.0", "gulp-autoprefixer": "^6.0.0", + "gulp-babel": "^8.0.0", "gulp-cached": "^1.1.1", "gulp-chmod": "^2.0.0", "gulp-concat": "^2.6.1", @@ -21,12 +28,17 @@ "gulp-sass": "^4.0.1", "gulp-sourcemaps": "^2.6.4", "gulp-uglify-es": "^1.0.4", - "minimist": "^1.2.0" + "jest": "^23.6.0", + "minimist": "^1.2.0", + "vinyl-source-stream": "^2.0.0" }, 
"dependencies": { "bootstrap": "^4.1.3", "jquery": "^3.3.1", "popper.js": "^1.14.4", "video.js": "^7.2.2" + }, + "scripts": { + "test": "jest" } } diff --git a/pillar/api/__init__.py b/pillar/api/__init__.py index bf513675..a74b91ea 100644 --- a/pillar/api/__init__.py +++ b/pillar/api/__init__.py @@ -1,6 +1,6 @@ def setup_app(app): from . import encoding, blender_id, projects, local_auth, file_storage - from . import users, nodes, latest, blender_cloud, service, activities + from . import users, nodes, latest, blender_cloud, service, activities, timeline from . import organizations from . import search @@ -11,6 +11,7 @@ def setup_app(app): local_auth.setup_app(app, url_prefix='/auth') file_storage.setup_app(app, url_prefix='/storage') latest.setup_app(app, url_prefix='/latest') + timeline.setup_app(app, url_prefix='/timeline') blender_cloud.setup_app(app, url_prefix='/bcloud') users.setup_app(app, api_prefix='/users') service.setup_app(app, api_prefix='/service') diff --git a/pillar/api/timeline.py b/pillar/api/timeline.py new file mode 100644 index 00000000..6735ec08 --- /dev/null +++ b/pillar/api/timeline.py @@ -0,0 +1,360 @@ +import itertools +import typing +from datetime import datetime +from operator import itemgetter + +import attr +import bson +import pymongo +from flask import Blueprint, current_app, request, url_for + +from pillar.api.utils import jsonify, pretty_duration, str2id +from pillar.web.utils import pretty_date + +blueprint = Blueprint('timeline', __name__) + + +@attr.s(auto_attribs=True) +class TimelineDO: + groups: typing.List['GroupDO'] = [] + continue_from: typing.Optional[float] = None + + +@attr.s(auto_attribs=True) +class GroupDO: + label: typing.Optional[str] = None + url: typing.Optional[str] = None + items: typing.Dict = {} + groups: typing.Iterable['GroupDO'] = [] + + +class SearchHelper: + def __init__(self, nbr_of_weeks: int, continue_from: typing.Optional[datetime], + project_ids: typing.List[bson.ObjectId], sort_direction: str): + 
self._nbr_of_weeks = nbr_of_weeks + self._continue_from = continue_from + self._project_ids = project_ids + self.sort_direction = sort_direction + + def _match(self, continue_from: typing.Optional[datetime]) -> dict: + created = {} + if continue_from: + if self.sort_direction == 'desc': + created = {'_created': {'$lt': continue_from}} + else: + created = {'_created': {'$gt': continue_from}} + return {'_deleted': {'$ne': True}, + 'node_type': {'$in': ['asset', 'post']}, + 'project': {'$in': self._project_ids}, + **created, + } + + def raw_weeks_from_mongo(self) -> pymongo.collection.Collection: + direction = pymongo.DESCENDING if self.sort_direction == 'desc' else pymongo.ASCENDING + nodes_coll = current_app.db('nodes') + return nodes_coll.aggregate([ + {'$match': self._match(self._continue_from)}, + {'$lookup': {"from": "projects", + "localField": "project", + "foreignField": "_id", + "as": "project"}}, + {'$unwind': {'path': "$project"}}, + {'$lookup': {"from": "users", + "localField": "user", + "foreignField": "_id", + "as": "user"}}, + {'$unwind': {'path': "$user"}}, + {'$project': { + '_created': 1, + 'project._id': 1, + 'project.url': 1, + 'project.name': 1, + 'user._id': 1, + 'user.full_name': 1, + 'name': 1, + 'node_type': 1, + 'picture': 1, + 'properties': 1, + 'permissions': 1, + }}, + {'$group': { + '_id': {'year': {'$isoWeekYear': '$_created'}, + 'week': {'$isoWeek': '$_created'}}, + 'nodes': {'$push': '$$ROOT'} + }}, + {'$sort': {'_id.year': direction, + '_id.week': direction}}, + {'$limit': self._nbr_of_weeks} + ]) + + def has_more(self, continue_from: datetime) -> bool: + nodes_coll = current_app.db('nodes') + result = nodes_coll.count(self._match(continue_from)) + return bool(result) + + +class Grouper: + @classmethod + def label(cls, node): + return None + + @classmethod + def url(cls, node): + return None + + @classmethod + def group_key(cls) -> typing.Callable[[dict], typing.Any]: + raise NotImplemented() + + @classmethod + def sort_key(cls) -> 
typing.Callable[[dict], typing.Any]: + raise NotImplemented() + + +class ProjectGrouper(Grouper): + @classmethod + def label(cls, project: dict): + return project['name'] + + @classmethod + def url(cls, project: dict): + return url_for('projects.view', project_url=project['url']) + + @classmethod + def group_key(cls) -> typing.Callable[[dict], typing.Any]: + return itemgetter('project') + + @classmethod + def sort_key(cls) -> typing.Callable[[dict], typing.Any]: + return lambda node: node['project']['_id'] + + +class UserGrouper(Grouper): + @classmethod + def label(cls, user): + return user['full_name'] + + @classmethod + def group_key(cls) -> typing.Callable[[dict], typing.Any]: + return itemgetter('user') + + @classmethod + def sort_key(cls) -> typing.Callable[[dict], typing.Any]: + return lambda node: node['user']['_id'] + + +class TimeLineBuilder: + def __init__(self, search_helper: SearchHelper, grouper: typing.Type[Grouper]): + self.search_helper = search_helper + self.grouper = grouper + self.continue_from = None + + def build(self) -> TimelineDO: + raw_weeks = self.search_helper.raw_weeks_from_mongo() + clean_weeks = (self.create_week_group(week) for week in raw_weeks) + + return TimelineDO( + groups=list(clean_weeks), + continue_from=self.continue_from.timestamp() if self.search_helper.has_more(self.continue_from) else None + ) + + def create_week_group(self, week: dict) -> GroupDO: + nodes = week['nodes'] + nodes.sort(key=itemgetter('_created'), reverse=True) + self.update_continue_from(nodes) + groups = self.create_groups(nodes) + + return GroupDO( + label=f'Week {week["_id"]["week"]}, {week["_id"]["year"]}', + groups=groups + ) + + def create_groups(self, nodes: typing.List[dict]) -> typing.List[GroupDO]: + self.sort_nodes(nodes) # groupby assumes that the list is sorted + nodes_grouped = itertools.groupby(nodes, self.grouper.group_key()) + groups = (self.clean_group(grouped_by, group) for grouped_by, group in nodes_grouped) + groups_sorted = 
sorted(groups, key=self.group_row_sorter, reverse=True) + return groups_sorted + + def sort_nodes(self, nodes: typing.List[dict]): + nodes.sort(key=itemgetter('node_type')) + nodes.sort(key=self.grouper.sort_key()) + + def update_continue_from(self, sorted_nodes: typing.List[dict]): + if self.search_helper.sort_direction == 'desc': + first_created = sorted_nodes[-1]['_created'] + candidate = self.continue_from or first_created + self.continue_from = min(candidate, first_created) + else: + last_created = sorted_nodes[0]['_created'] + candidate = self.continue_from or last_created + self.continue_from = max(candidate, last_created) + + def clean_group(self, grouped_by: typing.Any, group: typing.Iterable[dict]) -> GroupDO: + items = self.create_items(group) + return GroupDO( + label=self.grouper.label(grouped_by), + url=self.grouper.url(grouped_by), + items=items + ) + + def create_items(self, group) -> typing.List[dict]: + by_node_type = itertools.groupby(group, key=itemgetter('node_type')) + items = {} + for node_type, nodes in by_node_type: + items[node_type] = [self.node_prettyfy(n) for n in nodes] + return items + + @classmethod + def node_prettyfy(cls, node: dict)-> dict: + node['pretty_created'] = pretty_date(node['_created']) + duration_seconds = node['properties'].get('duration_seconds') + if duration_seconds is not None: + node['properties']['duration'] = pretty_duration(duration_seconds) + return node + + @classmethod + def group_row_sorter(cls, row: GroupDO) -> typing.Tuple[datetime, datetime]: + ''' + Groups that contain posts are more interesting, and are therefore ranked higher. + :param row: the group to compute a sort key for + :return: tuple with newest post date and newest asset date + ''' + def newest_created(nodes: typing.List[dict]) -> datetime: + if nodes: + return nodes[0]['_created'] + return datetime.fromtimestamp(0, tz=bson.tz_util.utc) + newest_post_date = newest_created(row.items.get('post')) + newest_asset_date = newest_created(row.items.get('asset')) + return
newest_post_date, newest_asset_date + + +def _public_project_ids() -> typing.List[bson.ObjectId]: + """Returns a list of ObjectIDs of public projects. + + Memoized in setup_app(). + """ + + proj_coll = current_app.db('projects') + result = proj_coll.find({'is_private': False}, {'_id': 1}) + return [p['_id'] for p in result] + + +@blueprint.route('/', methods=['GET']) +def global_timeline(): + continue_from_str = request.args.get('from') + continue_from = parse_continue_from(continue_from_str) + nbr_of_weeks_str = request.args.get('weeksToLoad') + nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str) + sort_direction = request.args.get('dir', 'desc') + return _global_timeline(continue_from, nbr_of_weeks, sort_direction) + + +@blueprint.route('/p/', methods=['GET']) +def project_timeline(pid_path: str): + continue_from_str = request.args.get('from') + continue_from = parse_continue_from(continue_from_str) + nbr_of_weeks_str = request.args.get('weeksToLoad') + nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str) + sort_direction = request.args.get('dir', 'desc') + pid = str2id(pid_path) + return _project_timeline(continue_from, nbr_of_weeks, sort_direction, pid) + + +def parse_continue_from(from_arg) -> typing.Optional[datetime]: + try: + from_float = float(from_arg) + except (TypeError, ValueError): + return None + return datetime.fromtimestamp(from_float, tz=bson.tz_util.utc) + + +def parse_nbr_of_weeks(weeks_to_load: str) -> int: + try: + return int(weeks_to_load) + except (TypeError, ValueError): + return 3 + + +def _global_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction: str): + """Returns an aggregated view of what has happened on the site + Memoized in setup_app(). 
+ + :param continue_from: Python utc timestamp where to begin aggregation + + :param nbr_of_weeks: Number of weeks to return + + Example output: + { + groups: [{ + label: 'Week 32', + groups: [{ + label: 'Spring', + url: '/p/spring', + items:{ + post: [blogPostDoc, blogPostDoc], + asset: [assetDoc, assetDoc] + }, + groups: ... + }] + }], + continue_from: 123456.2 // python timestamp + } + """ + builder = TimeLineBuilder( + SearchHelper(nbr_of_weeks, continue_from, _public_project_ids(), sort_direction), + ProjectGrouper + ) + return jsonify_timeline(builder.build()) + + +def jsonify_timeline(timeline: TimelineDO): + return jsonify( + attr.asdict(timeline, + recurse=True, + filter=lambda att, value: value is not None) + ) + + +def _project_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction, pid: bson.ObjectId): + """Returns an aggregated view of what has happened on the site + Memoized in setup_app(). + + :param continue_from: Python utc timestamp where to begin aggregation + + :param nbr_of_weeks: Number of weeks to return + + Example output: + { + groups: [{ + label: 'Week 32', + groups: [{ + label: 'Tobias Johansson', + items:{ + post: [blogPostDoc, blogPostDoc], + asset: [assetDoc, assetDoc] + }, + groups: ... 
+ }] + }], + continue_from: 123456.2 // python timestamp + } + """ + builder = TimeLineBuilder( + SearchHelper(nbr_of_weeks, continue_from, [pid], sort_direction), + UserGrouper + ) + return jsonify_timeline(builder.build()) + + +def setup_app(app, url_prefix): + global _public_project_ids + global _global_timeline + global _project_timeline + + app.register_api_blueprint(blueprint, url_prefix=url_prefix) + cached = app.cache.cached(timeout=3600) + _public_project_ids = cached(_public_project_ids) + memoize = app.cache.memoize(timeout=60) + _global_timeline = memoize(_global_timeline) + _project_timeline = memoize(_project_timeline) diff --git a/requirements.txt b/requirements.txt index 8e33836b..0d7b0104 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ # Primary requirements -r ../pillar-python-sdk/requirements.txt -attrs==16.2.0 +attrs==18.2.0 algoliasearch==1.12.0 bcrypt==3.1.3 blinker==1.4 diff --git a/src/scripts/js/es6/common/templates.js b/src/scripts/js/es6/common/templates.js new file mode 100644 index 00000000..ce66097c --- /dev/null +++ b/src/scripts/js/es6/common/templates.js @@ -0,0 +1 @@ +export { Nodes } from './templates/templates' \ No newline at end of file diff --git a/src/scripts/js/es6/common/templates/__tests__/assets.test.js b/src/scripts/js/es6/common/templates/__tests__/assets.test.js new file mode 100644 index 00000000..c8e52c87 --- /dev/null +++ b/src/scripts/js/es6/common/templates/__tests__/assets.test.js @@ -0,0 +1,115 @@ +import { Assets } from '../assets' +import {} from '' + +jest.useFakeTimers(); + +describe('Assets', () => { + describe('create$listItem', () => { + let nodeDoc; + let spyGet; + beforeEach(()=>{ + nodeDoc = { + _id: 'my-asset-id', + name: 'My Asset', + pretty_created: '2 hours ago', + node_type: 'asset', + project: { + name: 'My Project', + url: 'url-to-project' + }, + properties: { + content_type: 'image' + } + }; + + spyGet = spyOn($, 'get').and.callFake(function(url) { + let ajaxMock = 
$.Deferred(); + let response = { + variations: [{ + size: 'l', + link: 'wrong-img-link', + width: 150, + height: 170, + },{ + size: 'm', + link: 'img-link', + width: 50, + height: 70, + },{ + size: 's', + link: 'wrong-img-link', + width: 5, + height: 7, + }] + } + ajaxMock.resolve(response); + return ajaxMock.promise(); + }); + }); + describe('image content', () => { + test('node with picture', done => { + nodeDoc.picture = 'picture_id'; + let $card = Assets.create$listItem(nodeDoc); + jest.runAllTimers(); + expect($card.length).toEqual(1); + expect($card.prop('tagName')).toEqual('A'); + expect($card.hasClass('card asset')).toBeTruthy(); + expect($card.attr('href')).toEqual('/nodes/my-asset-id/redir'); + expect($card.attr('title')).toEqual('My Asset'); + + let $body = $card.find('.card-body'); + expect($body.length).toEqual(1); + + let $title = $body.find('.card-title'); + expect($title.length).toEqual(1); + + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenLastCalledWith('/api/files/picture_id'); + + let $image = $card.find('img'); + expect($image.length).toEqual(1); + + let $imageSubsititure = $card.find('.pi-asset'); + expect($imageSubsititure.length).toEqual(0); + + let $progress = $card.find('.progress'); + expect($progress.length).toEqual(0); + + let $watched = $card.find('.card-label'); + expect($watched.length).toEqual(0); + done(); + }); + + test('node without picture', done => { + let $card = Assets.create$listItem(nodeDoc); + expect($card.length).toEqual(1); + expect($card.prop('tagName')).toEqual('A'); + expect($card.hasClass('card asset')).toBeTruthy(); + expect($card.attr('href')).toEqual('/nodes/my-asset-id/redir'); + expect($card.attr('title')).toEqual('My Asset'); + + let $body = $card.find('.card-body'); + expect($body.length).toEqual(1); + + let $title = $body.find('.card-title'); + expect($title.length).toEqual(1); + + expect(spyGet).toHaveBeenCalledTimes(0); + + let $image = $card.find('img'); + 
expect($image.length).toEqual(0); + + let $imageSubsititure = $card.find('.pi-asset'); + expect($imageSubsititure.length).toEqual(1); + + let $progress = $card.find('.progress'); + expect($progress.length).toEqual(0); + + let $watched = $card.find('.card-label'); + expect($watched.length).toEqual(0); + done(); + }); + }); + }) +}); + diff --git a/src/scripts/js/es6/common/templates/assets.js b/src/scripts/js/es6/common/templates/assets.js new file mode 100644 index 00000000..693e919a --- /dev/null +++ b/src/scripts/js/es6/common/templates/assets.js @@ -0,0 +1,97 @@ +import { NodesFactoryInterface } from './nodes' +import { thenLoadImage, thenLoadVideoProgress } from './utils'; + +class Assets extends NodesFactoryInterface{ + static create$listItem(node) { + var markIfPublic = true; + let $card = $('') + .addClass('js-tagged-asset') + .attr('href', '/nodes/' + node._id + '/redir') + .attr('title', node.name); + + let $thumbnailContainer = $('
'); + + function warnNoPicture() { + let $cardIcon = $('
'); + $cardIcon.html(''); + $thumbnailContainer.append($cardIcon); + } + + if (!node.picture) { + warnNoPicture(); + } else { + $(window).trigger('pillar:workStart'); + + thenLoadImage(node.picture) + .fail(warnNoPicture) + .then((imgVariation)=>{ + let img = $('') + .attr('alt', node.name) + .attr('src', imgVariation.link) + .attr('width', imgVariation.width) + .attr('height', imgVariation.height); + $thumbnailContainer.append(img); + }) + .always(function(){ + $(window).trigger('pillar:workStop'); + }); + } + + $card.append($thumbnailContainer); + + /* Card body for title and meta info. */ + let $cardBody = $('
'); + let $cardTitle = $('
'); + $cardTitle.text(node.name); + $cardBody.append($cardTitle); + + let $cardMeta = $('