diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c0c5d8d13..a77d911b5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,7 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: github branch run: | if [ "${{ github.event.release.target_commitish }}" != "" ]; then @@ -46,9 +46,10 @@ jobs: else echo "CLOWDER_VERSION=testing" >> $GITHUB_ENV fi - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + distribution: 'zulu' + java-version: 8 - name: Cache SBT ivy cache uses: actions/cache@v1 with: @@ -84,7 +85,7 @@ jobs: ports: - 27017:27017 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: github branch run: | if [ "${{ github.event.release.target_commitish }}" != "" ]; then @@ -100,16 +101,17 @@ jobs: else echo "CLOWDER_VERSION=testing" >> $GITHUB_ENV fi - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + distribution: 'zulu' + java-version: 8 - name: Cache SBT ivy cache - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/.ivy2/cache key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('project/Build.scala') }} - name: Cache SBT - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/.sbt key: ${{ runner.os }}-sbt-${{ hashFiles('project/Build.scala') }} @@ -128,7 +130,7 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: github branch run: | if [ "${{ github.event.release.target_commitish }}" != "" ]; then @@ -144,16 +146,17 @@ jobs: else echo "CLOWDER_VERSION=testing" >> $GITHUB_ENV fi - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + distribution: 'zulu' + java-version: 8 - name: Cache SBT ivy cache - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/.ivy2/cache key: ${{ runner.os }}-sbt-ivy-cache-${{ 
hashFiles('project/Build.scala') }} - name: Cache SBT - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/.sbt key: ${{ runner.os }}-sbt-${{ hashFiles('project/Build.scala') }} @@ -204,7 +207,7 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: github branch run: | if [ "${{ github.event.release.target_commitish }}" != "" ]; then @@ -220,16 +223,17 @@ jobs: else echo "CLOWDER_VERSION=testing" >> $GITHUB_ENV fi - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + distribution: 'zulu' + java-version: 8 - name: Cache SBT ivy cache - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/.ivy2/cache key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('project/Build.scala') }} - name: Cache SBT - uses: actions/cache@v1 + uses: actions/cache@v3 with: path: ~/.sbt key: ${{ runner.os }}-sbt-${{ hashFiles('project/Build.scala') }} diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 96964172d..7702ae180 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -29,6 +29,8 @@ env: jobs: docker: runs-on: ubuntu-latest + env: + dockerhub: ${{ secrets.DOCKERHUB_USERNAME }} permissions: packages: write strategy: @@ -95,7 +97,9 @@ jobs: push_tags="" for tag in ${tags}; do - push_tags="${push_tags}${{ env.DOCKERHUB_ORG }}/${{ matrix.IMAGE }}:${tag}," + if [ "${{ secrets.DOCKERHUB_USERNAME }}" != "" ]; then + push_tags="${push_tags}${{ env.DOCKERHUB_ORG }}/${{ matrix.IMAGE }}:${tag}," + fi push_tags="${push_tags}ghcr.io/${{ github.repository_owner }}/${{ matrix.IMAGE }}:${tag}," done push_tags="${push_tags%,*}" @@ -127,6 +131,7 @@ jobs: # login to registries - name: Login to DockerHub + if: env.dockerhub != '' uses: docker/login-action@v2 with: username: ${{ secrets.DOCKERHUB_USERNAME }} @@ -193,7 +198,7 @@ jobs: # update README at DockerHub - name: Docker Hub Description - if: matrix.README != '' && 
github.event_name == 'push' && github.repository == env.MASTER_REPO && env.BRANCH == 'master' + if: env.dockerhub != '' && matrix.README != '' && github.event_name == 'push' && github.repository == env.MASTER_REPO && env.BRANCH == 'master' uses: peter-evans/dockerhub-description@v2 env: DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} diff --git a/.github/workflows/swagger.yml b/.github/workflows/swagger.yml index b0f328a67..b55dfb26f 100644 --- a/.github/workflows/swagger.yml +++ b/.github/workflows/swagger.yml @@ -20,7 +20,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: openapi-lint uses: mbowman100/swagger-validator-action@master diff --git a/CHANGELOG.md b/CHANGELOG.md index dff5cd522..25fd9e4b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,16 +5,64 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## Unreleased + +### Fixed + +- Search results are checked to verify nothing has been put in trash before display [#377](https://github.com/clowder-framework/clowder/issues/377) ## Unreleased ### Added +- Users can be marked as ReadOnly [#405](https://github.com/clowder-framework/clowder/issues/405) +- Added Trash button to delete section [#347](https://github.com/clowder-framework/clowder/issues/347) +- Add "when" parameter in a few GET API endpoints to enable pagination [#266](https://github.com/clowder-framework/clowder/issues/266) +- Extractors can now specify an extractor_key and an owner (email address) when sending a +registration or heartbeat to Clowder that will restrict use of that extractor to them. +- Added a dropdown menu to select all spaces, your spaces and also the spaces you have access to. [#374](https://github.com/clowder-framework/clowder/issues/374) +- Add SMTP_FROM in docker-compose yml file. 
[#417](https://github.com/clowder-framework/clowder/issues/417) +- Keycloak provider with secure social [#419](https://github.com/clowder-framework/clowder/issues/419) +- Documentation on how to do easy testing of pull requests +- Previewer source URL in the documentation to point to the Clowder GitHub repo. [#395](https://github.com/clowder-framework/clowder/issues/395) +- Added a citation.cff file +- Added Google's model viewer within viewer_three.js previewer + +### Fixed +- Updated lastModifiedDate when updating file or metadata to a dataset, added lastModified to UI [#386](https://github.com/clowder-framework/clowder/issues/386) +- Disabled button while create dataset ajax call is still going on [#311](https://github.com/clowder-framework/clowder/issues/311) +- Changed default to 'Viewer' while inviting users to new spaces [#375](https://github.com/clowder-framework/clowder/issues/375) +- Fixed bug where complex JSON metadata objects using arrays were not being indexed properly for search. +- Fixed positioning problems related to how the 3D models appear on the screen + + +## 1.21.0 - 2022-08-23 + +**_Important:_** This update requires a MongoDB schema update due to a bug in the original migration of showing summary statistics at the +space level. Make sure to start the application with -DMONGOUPDATE=1. You can also run the [fixCounts.js](https://github.com/clowder-framework/clowder/blob/develop/scripts/updates/fix-counts.js) +script prior to upgrading to minimize the downtime.
+ +### Added +- api.Files jsonfile, adds two fields "downloads" and "views" [#228](https://github.com/clowder-framework/clowder/issues/228) +- Dataset and file scala.html pages incl schema.org jsonld metadata for (google)datasetsearch [#335](https://github.com/clowder-framework/clowder/issues/335) +- MiniUser and LicenseData now have to_jsonld methods to return string part of [#335](https://github.com/clowder-framework/clowder/issues/335) metadata +- LicenseData has urlViaAttributes used by its to_jsonld to guess url when empty, for [#335](https://github.com/clowder-framework/clowder/issues/335) - MRI previewer for NIfTI (.nii) files. +- Dataset page usually defaults to Files tab, but if no files will now show Metadata first +- HEIC (.heic) and HEIF (.heif) mimetypes to support new Apple iPhone image file format. +- In the docker container the folder /home/clowder/data is now whitelisted by default for uploading by reference. + This can be changed using the environment variable CLOWDER_SOURCEPATH. +- The current CLA for developers of clowder.
### Fixed - Send email to all admins in a single email when a user submits 'Request access' for a space - Send email to all admins and request user in a single email when any admin accepts/rejects 'Request access' for a space [#330](https://github.com/clowder-framework/clowder/issues/330) -- Fixed positioning problems related to how the 3D models appear on the screen +- script/code to count space in files was not correct [#366](https://github.com/clowder-framework/clowder/issues/366) +- github actions would fail for docker builds due to secrets not existing +- Fix to remove dataset from a space [#349](https://github.com/clowder-framework/clowder/issues/349) + +### Changed +- Utils.baseURL now on RequestHeader instead of Request[Any] +- MongoDB Service log error:'Not all dataset IDs found for Dataset|Folder bulk get request', now incl all the IDs notFound ## 1.20.3 - 2022-06-10 @@ -49,6 +97,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - Documentation: Added "How to contribute documentation" page - Documentation: New Sphinx plugins for dropdowns and menus. + ## 1.20.0 - 2022-02-07 ### Added diff --git a/CLA.md b/CLA.md new file mode 100644 index 000000000..e5aa8fda6 --- /dev/null +++ b/CLA.md @@ -0,0 +1,28 @@ +# CLOWDER PROJECT Contributor License Agreement + +Thank you for your interest in contributing to the Clowder Project. In order to contribute, you will need to provide your name and contact information and sign this Clowder Project Contributor License Agreement, which sets forth the terms and conditions of the intellectual property license granted with your contributions. + +This Clowder Project Contributor License Agreement (“Agreement”) is by and between you (any person or entity “You” or “Your”) and The Board of Trustees of the University of Illinois, through its National Center for Supercomputing Applications (“Illinois”). Please read this document carefully before signing and keep a copy for your records.
By signing this Agreement or making a “Contribution” to the “Clowder Project” as defined below, You agree to the following: + +1. “Clowder Project” is an open-source project that aims to simplify the management of research data. Clowder provides tools to manage the full lifecycle of research data; scalable with respect to data size and extensible to the needs of different research domains. +2. “Contribution” means all of Your contributions of object code, source code, and documentation and any modifications thereof to the Clowder Project. +3. “Licensed Patents” mean patent claims licensable by Contributor which are necessarily infringed by the making, using, selling, offering for sale, having made, import, or transfer of either its Contribution alone or when combined with the Clowder Project. +4. You represent that to the best of your knowledge the following: + 1. You are at least 18 years of age and have full power and authority to enter into this Agreement and to grant the rights in and to the Contribution as set forth herein (individuals who are under 18 years of age and who wish to contribute to the Clowder project may not enter into this Agreement, but may contact Clowder at clowder@lists.illinois.edu to explore alternatives); + 2. If your employer has rights to intellectual property that You create as part of the Contribution, You represent that you have obtained permission from Your employer to make Contributions on behalf of that employer or Your employer waived any rights in and to Your Contributions, or your employer authorizes the Contribution and agrees to be bound by the terms herein by signing as an entity below; + 3. That either: + 1. all documentation and code in the Contribution is Your original work and includes complete details of any third-party license and any other restriction (including, but not limited to related patents and trademarks) of which you are personally aware and which are associated with any part of Your Contributions; or + 2.
any part of the Contribution that is not Your original creation is submitted to Clowder separately from any original Contribution, includes the complete details of its source and any corresponding license and any other restriction (including, but not limited to related patents, trademarks, and license agreements) of which you are personally aware, and is conspicuously marked as "Submitted on behalf of a third-party: [named here]". + 4. That Your Contribution does not include any viruses, worms, Trojan horses, malicious code or other harmful or destructive content; + 5. That You are not debarred from receiving services or other exports under U.S. law, including, without limitation, the Foreign Assets Control Regulations, 31 C.F.R. 500 et seq.; the Export Administration Regulations, 15 C.F.R. 730 et seq.; and the International Traffic in Arms Regulations, 22 C.F.R. 120 et seq. Persons who may not be eligible to receive services or exports under U.S. law include citizens and residents of countries subject to U.S. embargoes, and individuals specifically identified on the Specially Designated Nationals List, the Denied Persons List , the Arms Export Control Act Debarred Parties List, or any other list or General Order issued by the U.S. Department of the Treasury, Office of Foreign Assets Control; the U.S. Department of Commerce, Bureau of Industry and Security; the U.S. Department of State, Directorate of Defense Trade Controls; or any other agency with jurisdiction to issue debarment orders; and + 6. Your Contribution does not include any encryption technology and no government license or permission is required for the export, import, transfer or use of the Contribution. +5. You represent that the representations made herein are accurate and agree to notify Illinois of any facts or circumstances of which You become aware that would make any of Your representations inaccurate in any respect. +6. 
You hereby grant to Illinois and to recipients of the Clowder software distributed by Illinois (collectively, “Recipient”), a perpetual, irrevocable, non-exclusive, worldwide, royalty-free unrestricted license to use, reproduce, prepare derivative works of, publicly display, publicly perform, distribute, and sublicense the Contribution, and such derivative works, in source code and object code form. +7. You hereby grant to Recipient a perpetual, non-exclusive, worldwide, royalty-free patent license under Licensed Patents, if any, to make, have made, use, offer to sell, sell, import, and otherwise transfer Your Contribution in source code and object code form. This patent license shall apply to the combination of Your Contribution as part of the Clowder Project if, at the time the Contribution is added by You, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. If any Recipient institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that Your Contribution or Clowder Project (excluding combinations of the Clowder Project with other software or hardware) infringes Your Licensed Patent, then the patent license granted to such Recipient under this Agreement shall terminate. +8. Except for the rights granted to Recipients above, You reserve all right, title and interest in and to Your Contribution. You are not expected to provide support for your Contributions. +9. Subject to Your representations above, Your Contributions are provided on an “AS-IS” basis WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND (express or implied), including, without limitation, any implied warranty of merchantability and fitness for a particular purpose and any warranty of non-infringement. +10.
Illinois, its trustees, directors, officers, employees, students and agents assume no liability in respect of any infringement of any copyright, patent or other right of third parties in connection with any Contributions, the Clowder Project, or Clowder software, and are not liable for any direct, indirect, punitive, special, incidental, consequential or exemplary damages arising in connection with any Contribution, the Clowder Project, or Clowder software. +11. INDEMNITY. You agree to indemnify and hold Clowder and its subsidiaries, affiliates, officers, agents, employees, partners and licensors harmless from any claim or demand, including but not limited to reasonable attorneys' fees, made by any third party due to or arising out of Contributions and Content you submit, post, transmit or otherwise make available through the Clowder Project, your violation of the Clowder Code of Conduct at http://clowderframework.org/, or your violation of any rights of another. +12. You agree that Illinois may assign this Agreement and Clowder Project to any third party. +13. The Clowder Project is under no obligation to accept and include every Contribution. +14. This Agreement is governed by the laws of the State of Illinois, excluding its conflict of laws provisions. diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 000000000..fdfc36690 --- /dev/null +++ b/TESTING.md @@ -0,0 +1,82 @@ +# Testing Clowder PR + +Download the [docker-compose.yml](https://raw.githubusercontent.com/clowder-framework/clowder/develop/docker-compose.yml) file in a new folder. 
Next create a .env file with the following data: + +```ini +COMPOSE_PROJECT_NAME=clowder +TRAEFIK_HOST=Host:yourmachine.ncsa.illinois.edu; +TRAEFIK_HTTP_PORT=80 +TRAEFIK_HTTPS_PORT=443 +TRAEFIK_HTTPS_OPTIONS=TLS +TRAEFIK_ACME_ENABLE=true +TRAEFIK_ACME_EMAIL=youremail@ncsa.illinois.edu +TRAEFIK_HTTP_REDIRECT=Redirect.EntryPoint:https +CLOWDER_SSL=true +CLOWDER_ADMINS=youremail@ncsa.illinois.edu +``` + +Next create a docker-compose.override.yml file: + +```yaml +version: '3.5' + +services: + # point to the PR image (in this case PR-404) + clowder: + image: ghcr.io/clowder-framework/clowder:PR-404 + + # add any more extractors if you want + # extract preview image + imagepreview: + image: clowder/extractors-image-preview:latest + restart: unless-stopped + networks: + - clowder + depends_on: + rabbitmq: + condition: service_started + environment: + - RABBITMQ_URI=${RABBITMQ_URI:-amqp://guest:guest@rabbitmq/%2F} + + # extract image metadata + imagemetadata: + image: clowder/extractors-image-metadata:latest + restart: unless-stopped + networks: + - clowder + depends_on: + rabbitmq: + condition: service_started + environment: + - RABBITMQ_URI=${RABBITMQ_URI:-amqp://guest:guest@rabbitmq/%2F} + + # digest + digest: + image: clowder/extractors-digest:latest + restart: unless-stopped + networks: + - clowder + depends_on: + rabbitmq: + condition: service_started + environment: + - RABBITMQ_URI=${RABBITMQ_URI:-amqp://guest:guest@rabbitmq/%2F} +``` + +It is best practice to start with a `docker-compose pull` to make sure you have all the latest versions of the containers, followed by a `docker-compose up -d`. This will start all containers. You should be able to go to https://yourmachine.ncsa.illinois.edu. 
+ +If this is the first time running the stack (or if you removed the mongo database), you will need to create the initial user again: + +```bash +docker run --rm -it \ + --network clowder_clowder \ + -e "FIRSTNAME=Admin" \ + -e "LASTNAME=User" \ + -e "ADMIN=true" \ + -e "PASSWORD=areallygoodpassword" \ + -e "EMAIL_ADDRESS=youremail@ncsa.illinois.edu" \ + -e "MONGO_URI=mongodb://mongo:27017/clowder" \ + clowder/mongo-init +``` + + diff --git a/app/api/Admin.scala b/app/api/Admin.scala index d8c878df0..27bc8f482 100644 --- a/app/api/Admin.scala +++ b/app/api/Admin.scala @@ -125,10 +125,10 @@ class Admin @Inject() (userService: UserService, list.foreach(id => userService.findById(UUID(id)) match { case Some(u: ClowderUser) => { - if (u.status == UserStatus.Inactive) { + if (u.status != UserStatus.Active) { userService.update(u.copy(status = UserStatus.Active)) - val subject = s"[${AppConfiguration.getDisplayName}] account activated" - val body = views.html.emails.userActivated(u, active = true)(request) + val subject = s"[${AppConfiguration.getDisplayName}] account is now active" + val body = views.html.emails.userChanged(u, "activated")(request) util.Mail.sendEmail(subject, request.user, u, body) } } @@ -138,10 +138,10 @@ class Admin @Inject() (userService: UserService, list.foreach(id => userService.findById(UUID(id)) match { case Some(u: ClowderUser) => { - if (!(u.status == UserStatus.Inactive)) { + if (u.status != UserStatus.Inactive) { userService.update(u.copy(status = UserStatus.Inactive)) - val subject = s"[${AppConfiguration.getDisplayName}] account deactivated" - val body = views.html.emails.userActivated(u, active = false)(request) + val subject = s"[${AppConfiguration.getDisplayName}] account is deactivated" + val body = views.html.emails.userChanged(u, "deactivated")(request) util.Mail.sendEmail(subject, request.user, u, body) } } @@ -150,26 +150,27 @@ class Admin @Inject() (userService: UserService, (request.body \ 
"admin").asOpt[List[String]].foreach(list => list.foreach(id => userService.findById(UUID(id)) match { - case Some(u: ClowderUser) if (u.status == UserStatus.Active) => { - - userService.update(u.copy(status = UserStatus.Admin)) - val subject = s"[${AppConfiguration.getDisplayName}] admin access granted" - val body = views.html.emails.userAdmin(u, admin = true)(request) - util.Mail.sendEmail(subject, request.user, u, body) - + case Some(u: ClowderUser) => { + if (u.status != UserStatus.Admin) { + userService.update(u.copy(status = UserStatus.Admin)) + val subject = s"[${AppConfiguration.getDisplayName}] account is now an admin" + val body = views.html.emails.userChanged(u, "an admin account")(request) + util.Mail.sendEmail(subject, request.user, u, body) + } } case _ => Logger.error(s"Could not update user with id=${id}") })) - (request.body \ "unadmin").asOpt[List[String]].foreach(list => + (request.body \ "readonly").asOpt[List[String]].foreach(list => list.foreach(id => userService.findById(UUID(id)) match { - case Some(u: ClowderUser) if (u.status == UserStatus.Admin) => { - userService.update(u.copy(status = UserStatus.Active)) - val subject = s"[${AppConfiguration.getDisplayName}] admin access revoked" - val body = views.html.emails.userAdmin(u, admin = false)(request) - util.Mail.sendEmail(subject, request.user, u, body) + case Some(u: ClowderUser) => { + if (u.status != UserStatus.ReadOnly) { + userService.update(u.copy(status = UserStatus.ReadOnly)) + val subject = s"[${AppConfiguration.getDisplayName}] account is now read-only" + val body = views.html.emails.userChanged(u, "read-only")(request) + util.Mail.sendEmail(subject, request.user, u, body) + } } - case _ => Logger.error(s"Could not update user with id=${id}") })) Ok(toJson(Map("status" -> "success"))) diff --git a/app/api/ApiController.scala b/app/api/ApiController.scala index d996b6786..2a5c435b1 100644 --- a/app/api/ApiController.scala +++ b/app/api/ApiController.scala @@ -88,12 +88,13 @@
trait ApiController extends Controller { userRequest.user match { case Some(u) if !AppConfiguration.acceptedTermsOfServices(u.termsOfServices) => Future.successful(Unauthorized("Terms of Service not accepted")) case Some(u) if (u.status == UserStatus.Inactive) => Future.successful(Unauthorized("Account is not activated")) + case Some(u) if (u.status == UserStatus.ReadOnly && !api.Permission.READONLY.contains(permission) && permission != Permission.DownloadFiles) => Future.successful(Unauthorized("Account is ReadOnly")) case Some(u) if u.superAdminMode || Permission.checkPermission(userRequest.user, permission, resourceRef) => block(userRequest) case Some(u) => { affectedResource match { case Some(resource) if Permission.checkOwner(u, resource) => block(userRequest) case _ => Future.successful(Unauthorized("Not authorized")) - } + } } case None if Permission.checkPermission(userRequest.user, permission, resourceRef) => block(userRequest) case _ => Future.successful(Unauthorized("Not authorized")) diff --git a/app/api/Collections.scala b/app/api/Collections.scala index bbfb8ba29..f917b91b4 100644 --- a/app/api/Collections.scala +++ b/app/api/Collections.scala @@ -157,6 +157,10 @@ class Collections @Inject() (datasets: DatasetService, case Some(collection) => { val useTrash = play.api.Play.configuration.getBoolean("useTrash").getOrElse(false) if (!useTrash || (useTrash && collection.trash)){ + Logger.debug("Deleting collection from indexes " + collectionId) + current.plugin[ElasticsearchPlugin].foreach { + _.delete(collectionId.stringify) + } events.addObjectEvent(request.user , collection.id, collection.name, EventType.DELETE_COLLECTION.toString) collections.delete(collectionId) current.plugin[AdminsNotifierPlugin].foreach { diff --git a/app/api/Datasets.scala b/app/api/Datasets.scala index c0fe3fcbc..b59e67e8a 100644 --- a/app/api/Datasets.scala +++ b/app/api/Datasets.scala @@ -882,6 +882,7 @@ class Datasets @Inject()( datasets.index(id) Ok(toJson(Map("status" -> 
"success"))) + } case None => Logger.error(s"Error getting dataset $id"); NotFound(toJson(s"Error getting dataset $id")) } @@ -928,6 +929,7 @@ class Datasets @Inject()( events.addObjectEvent(request.user, id, x.name, EventType.ADD_METADATA_DATASET.toString) datasets.index(id) + Ok(toJson("Metadata successfully added to db")) } case e: JsError => { @@ -2039,6 +2041,11 @@ class Datasets @Inject()( def deleteDatasetHelper(id: UUID, request: UserRequest[AnyContent]) = { datasets.get(id) match { case Some(dataset) => { + Logger.debug("Deleting dataset from indexes " + id) + current.plugin[ElasticsearchPlugin].foreach { + _.delete(id.stringify) + } + //remove dataset from RDF triple store if triple store is used configuration.getString("userdfSPARQLStore").getOrElse("no") match { case "yes" => rdfsparql.removeDatasetFromGraphs(id) diff --git a/app/api/Extractions.scala b/app/api/Extractions.scala index 0ee8701c9..120b02472 100644 --- a/app/api/Extractions.scala +++ b/app/api/Extractions.scala @@ -127,36 +127,6 @@ class Extractions @Inject()( } } - /** - * - * Given a file id (UUID), submit this file for extraction - */ - def submitExtraction(id: UUID) = PermissionAction(Permission.ViewFile, Some(ResourceRef(ResourceRef.file, id)))(parse.json) { implicit request => - if (UUID.isValid(id.stringify)) { - files.get(id) match { - case Some(file) => { - // FIXME dataset not available? 
- routing.fileCreated(file, None, Utils.baseUrl(request).toString, request.apiKey) match { - case Some(jobId) => { - Ok(Json.obj("status" -> "OK", "job_id" -> jobId)) - } - case None => { - val message = "No jobId found for Extraction" - Logger.error(message) - InternalServerError(toJson(Map("status" -> "KO", "message" -> message))) - } - } - } - case None => { - Logger.error("Could not retrieve file that was just saved.") - InternalServerError("Error uploading file") - } - } //file match - } else { - BadRequest("Not valid id") - } - } - /** * For a given file id, checks for the status of all extractors processing that file. * REST endpoint GET /api/extractions/:id/status @@ -404,24 +374,24 @@ class Extractions @Inject()( Ok(jarr) } - def listExtractors(categories: List[String]) = AuthenticatedAction { implicit request => - Ok(Json.toJson(extractors.listExtractorsInfo(categories))) + def listExtractors(categories: List[String], space: Option[UUID]) = AuthenticatedAction { implicit request => + val userid = request.user.map(u => Some(u.id)).getOrElse(None) + Ok(Json.toJson(extractors.listExtractorsInfo(categories, userid))) } - def getExtractorInfo(extractorName: String) = AuthenticatedAction { implicit request => - extractors.getExtractorInfo(extractorName) match { + def getExtractorInfo(extractorName: String, extractor_key: Option[String]) = AuthenticatedAction { implicit request => + extractors.getExtractorInfo(extractorName, extractor_key, request.user) match { case Some(info) => Ok(Json.toJson(info)) case None => NotFound(Json.obj("status" -> "KO", "message" -> "Extractor info not found")) } } - def deleteExtractor(extractorName: String) = ServerAdminAction { implicit request => - extractors.deleteExtractor(extractorName) + def deleteExtractor(extractorName: String, extractor_key: Option[String]) = ServerAdminAction { implicit request => + extractors.deleteExtractor(extractorName, extractor_key) Ok(toJson(Map("status" -> "success"))) } - def addExtractorInfo() 
= AuthenticatedAction(parse.json) { implicit request => - + def addExtractorInfo(extractor_key: Option[String], user: Option[String]) = AuthenticatedAction(parse.json) { implicit request => // If repository is of type object, change it into an array. // This is for backward compatibility with requests from existing extractors. var requestJson = request.body \ "repository" match { @@ -438,34 +408,66 @@ class Extractions @Inject()( BadRequest(Json.obj("status" -> "KO", "message" -> JsError.toFlatJson(errors))) }, info => { - extractors.updateExtractorInfo(info) match { - case Some(u) => { - // Create/assign any default labels for this extractor - u.defaultLabels.foreach(labelStr => { - val segments = labelStr.split("/") - val (labelName, labelCategory) = if (segments.length > 1) { - (segments(1), segments(0)) - } else { - (segments(0), "Other") + // Check private extractor flags + val submissionInfo: Option[ExtractorInfo] = extractor_key match { + case Some(ek) => { + user match { + case None => { + Logger.error("Extractors with a private key must also specify a user email.") + None } - extractors.getExtractorsLabel(labelName) match { - case None => { - // Label does not exist - create and assign it - val createdLabel = extractors.createExtractorsLabel(labelName, Some(labelCategory), List[String](u.name)) - } - case Some(lbl) => { - // Label already exists, assign it - if (!lbl.extractors.contains(u.name)) { - val label = ExtractorsLabel(lbl.id, lbl.name, lbl.category, lbl.extractors ++ List[String](u.name)) - val updatedLabel = extractors.updateExtractorsLabel(label) + case Some(userEmail) => { + userservice.findByEmail(userEmail) match { + case Some(u) => { + val perms = List(new ResourceRef('user, u.id)) + Some(info.copy(unique_key=Some(ek), permissions=perms)) + } + case None => { + Logger.error("No user found with email "+userEmail) + None } } } - }) + } + } + case None => Some(info) + } + + // TODO: Check user permissions if the extractor_key has already been 
registered + + submissionInfo match { + case None => BadRequest("Extractors with a private key must also specify a non-anonymous user.") + case Some(subInfo) => { + extractors.updateExtractorInfo(subInfo) match { + case Some(u) => { + // Create/assign any default labels for this extractor + u.defaultLabels.foreach(labelStr => { + val segments = labelStr.split("/") + val (labelName, labelCategory) = if (segments.length > 1) { + (segments(1), segments(0)) + } else { + (segments(0), "Other") + } + extractors.getExtractorsLabel(labelName) match { + case None => { + // Label does not exist - create and assign it + val createdLabel = extractors.createExtractorsLabel(labelName, Some(labelCategory), List[String](u.name)) + } + case Some(lbl) => { + // Label already exists, assign it + if (!lbl.extractors.contains(u.name)) { + val label = ExtractorsLabel(lbl.id, lbl.name, lbl.category, lbl.extractors ++ List[String](u.name)) + val updatedLabel = extractors.updateExtractorsLabel(label) + } + } + } + }) - Ok(Json.obj("status" -> "OK", "message" -> ("Extractor info updated. ID = " + u.id))) + Ok(Json.obj("status" -> "OK", "message" -> ("Extractor info updated. 
ID = " + u.id))) + } + case None => BadRequest(Json.obj("status" -> "KO", "message" -> "Error updating extractor info")) + } } - case None => BadRequest(Json.obj("status" -> "KO", "message" -> "Error updating extractor info")) } } ) @@ -518,11 +520,14 @@ class Extractions @Inject()( } // if extractor_id is not specified default to execution of all extractors matching mime type (request.body \ "extractor").asOpt[String] match { - case Some(extractorId) => + case Some(extractorId) => { + val extractorKey = (request.body \ "extractor").asOpt[String] + extractors.getExtractorInfo(extractorId, extractorKey, request.user) val job_id = routing.submitFileManually(new UUID(originalId), file, Utils.baseUrl(request), extractorId, extra, datasetId, newFlags, request.apiKey, request.user) sink.logSubmitFileToExtractorEvent(file, extractorId, request.user) Ok(Json.obj("status" -> "OK", "job_id" -> job_id)) + } case None => { routing.fileCreated(file, None, Utils.baseUrl(request).toString, request.apiKey) match { case Some(job_id) => { diff --git a/app/api/Files.scala b/app/api/Files.scala index 6aba93af3..db737dfde 100644 --- a/app/api/Files.scala +++ b/app/api/Files.scala @@ -716,6 +716,12 @@ class Files @Inject()( } def jsonFile(file: File, serverAdmin: Boolean = false): JsValue = { + val foldersContainingFile = folders.findByFileId(file.id) + val allPaths: List[List[String]] = (for (folder <- foldersContainingFile) yield (folderPath(folder, List()).tail)) + var path_str = allPaths.map(xl => "/" + xl.map(x => x.toString()).mkString("/")).mkString("") + if(path_str == "") { + path_str = "/" + } val defaultMap = Map( "id" -> file.id.toString, "filename" -> file.filename, @@ -725,7 +731,11 @@ class Files @Inject()( "size" -> file.length.toString, "thumbnail" -> file.thumbnail_id.orNull, "authorId" -> file.author.id.stringify, - "status" -> file.status) + "status" -> file.status, + "views" -> file.stats.views.toString(), + "downloads" -> file.stats.downloads.toString(), + "path" 
-> path_str + ) // Only include filepath if using DiskByte storage and user is serverAdmin val jsonMap = file.loader match { @@ -1687,6 +1697,9 @@ class Files @Inject()( current.plugin[VersusPlugin].foreach { _.removeFromIndexes(id) } + current.plugin[ElasticsearchPlugin].foreach { + _.delete(id.stringify) + } Logger.debug("Deleting file: " + file.filename) files.removeFile(id, Utils.baseUrl(request), request.apiKey, request.user) diff --git a/app/api/Metadata.scala b/app/api/Metadata.scala index 1c2be48fb..1dec8f676 100644 --- a/app/api/Metadata.scala +++ b/app/api/Metadata.scala @@ -257,7 +257,7 @@ class Metadata @Inject() ( // Given a list of terms, create a new standard vocabulary from the list // Expects a JSON array of Strings as the request body - def createVocabulary() = AuthenticatedAction(parse.json) { + def createVocabulary() = PermissionAction(Permission.CreateVocabulary)(parse.json) { implicit request => request.user match { case None => BadRequest(toJson("Invalid user")) @@ -278,7 +278,7 @@ class Metadata @Inject() ( // Given an ID, replace the entire terms list of a standard vocabulary // Expects a JSON array of Strings as the request body - def updateVocabulary(id: UUID) = AuthenticatedAction(parse.json) { + def updateVocabulary(id: UUID) = PermissionAction(Permission.EditVocabulary)(parse.json) { implicit request => request.user match { case None => BadRequest(toJson("Invalid user")) @@ -304,7 +304,7 @@ class Metadata @Inject() ( } // Given an ID, delete the standard vocabulary with that ID - def deleteVocabulary(id: UUID) = AuthenticatedAction(parse.empty) { + def deleteVocabulary(id: UUID) = PermissionAction(Permission.DeleteVocabulary)(parse.empty) { implicit request => request.user match { case None => BadRequest(toJson("Invalid user")) @@ -341,7 +341,7 @@ class Metadata @Inject() ( } } - def editDefinition(id: UUID, spaceId: Option[String]) = AuthenticatedAction(parse.json) { + def editDefinition(id: UUID, spaceId: Option[String]) = 
PermissionAction(Permission.EditVocabulary)(parse.json) { implicit request => request.user match { case Some(user) => { @@ -387,7 +387,7 @@ class Metadata @Inject() ( } } - def deleteDefinition(id: UUID) = AuthenticatedAction { implicit request => + def deleteDefinition(id: UUID) = PermissionAction(Permission.CreateVocabulary) { implicit request => implicit val user = request.user user match { case Some(user) => { diff --git a/app/api/Permissions.scala b/app/api/Permissions.scala index ac902a31d..e3c6ca5c3 100644 --- a/app/api/Permissions.scala +++ b/app/api/Permissions.scala @@ -426,6 +426,7 @@ object Permission extends Enumeration { def checkPermission(user: User, permission: Permission, resourceRef: ResourceRef): Boolean = { // check if user is owner, in that case they can do what they want. if (user.superAdminMode) return true + if (user.status == UserStatus.ReadOnly && !READONLY.contains(permission) && permission != Permission.DownloadFiles) return false if (checkOwner(users.findByIdentity(user), resourceRef)) return true resourceRef match { diff --git a/app/api/Spaces.scala b/app/api/Spaces.scala index 8dd34edda..e48de645b 100644 --- a/app/api/Spaces.scala +++ b/app/api/Spaces.scala @@ -32,35 +32,39 @@ class Spaces @Inject()(spaces: SpaceService, val spaceTitle: String = Messages("space.title") //TODO- Minimal Space created with Name and description. 
URLs are not yet put in - def createSpace() = AuthenticatedAction(parse.json) { implicit request => + def createSpace() = PermissionAction(Permission.CreateSpace)(parse.json) { implicit request => Logger.debug("Creating new space") - val nameOpt = (request.body \ "name").asOpt[String] - val descOpt = (request.body \ "description").asOpt[String] - (nameOpt, descOpt) match { - case (Some(name), Some(description)) => { - // TODO: add creator - val userId = request.user.get.id - val c = ProjectSpace(name = name, description = description, created = new Date(), creator = userId, - homePage = List.empty, logoURL = None, bannerURL = None, collectionCount = 0, - datasetCount = 0, fileCount = 0, userCount = 0, spaceBytes = 0, metadata = List.empty) - spaces.insert(c) match { - case Some(id) => { - appConfig.incrementCount('spaces, 1) - events.addObjectEvent(request.user, c.id, c.name, "create_space") - userService.findRoleByName("Admin") match { - case Some(realRole) => { - spaces.addUser(userId, realRole, UUID(id)) - } - case None => Logger.info("No admin role found") + if(request.user.get.status == UserStatus.ReadOnly) { + BadRequest(toJson("User is Read-Only")) + } else { + val nameOpt = (request.body \ "name").asOpt[String] + val descOpt = (request.body \ "description").asOpt[String] + (nameOpt, descOpt) match { + case (Some(name), Some(description)) => { + // TODO: add creator + val userId = request.user.get.id + val c = ProjectSpace(name = name, description = description, created = new Date(), creator = userId, + homePage = List.empty, logoURL = None, bannerURL = None, collectionCount = 0, + datasetCount = 0, fileCount = 0, userCount = 0, spaceBytes = 0, metadata = List.empty) + spaces.insert(c) match { + case Some(id) => { + appConfig.incrementCount('spaces, 1) + events.addObjectEvent(request.user, c.id, c.name, "create_space") + userService.findRoleByName("Admin") match { + case Some(realRole) => { + spaces.addUser(userId, realRole, UUID(id)) + } + case None => 
Logger.info("No admin role found") + } + Ok(toJson(Map("id" -> id))) } - Ok(toJson(Map("id" -> id))) + case None => Ok(toJson(Map("status" -> "error"))) } - case None => Ok(toJson(Map("status" -> "error"))) - } + } + case (_, _) => BadRequest(toJson("Missing required parameters")) } - case (_, _) => BadRequest(toJson("Missing required parameters")) } } diff --git a/app/controllers/Datasets.scala b/app/controllers/Datasets.scala index 0950d07ce..4304d4be5 100644 --- a/app/controllers/Datasets.scala +++ b/app/controllers/Datasets.scala @@ -8,7 +8,8 @@ import play.api.Logger import play.api.Play.current import play.api.libs.json.Json._ import services._ -import util.{FileUtils, Formatters, RequiredFieldsConfig, SortingUtils } +import util.{FileUtils, Formatters, RequiredFieldsConfig, SortingUtils} + import scala.collection.immutable._ import scala.collection.mutable.ListBuffer import play.api.i18n.Messages @@ -728,7 +729,6 @@ class Datasets @Inject() ( implicit val user = request.user Logger.debug("------- in Datasets.submit ---------") - val folder = folderId.flatMap(id => folders.get(UUID(id))) val retMap = request.body.asFormUrlEncoded.get("datasetid").flatMap(_.headOption) match { case Some(ds) => { @@ -836,4 +836,4 @@ class Datasets @Inject() ( implicit val user = request.user Ok(views.html.generalMetadataSearch()) } -} \ No newline at end of file +} diff --git a/app/controllers/Extractors.scala b/app/controllers/Extractors.scala index b3e63e948..664fcbc3a 100644 --- a/app/controllers/Extractors.scala +++ b/app/controllers/Extractors.scala @@ -39,9 +39,9 @@ class Extractors @Inject() (extractions: ExtractionService, /** * Gets a map of all updates from all jobs given to this extractor. 
*/ - def showJobHistory(extractorName: String) = AuthenticatedAction { implicit request => + def showJobHistory(extractorName: String, extractor_key: Option[String]) = AuthenticatedAction { implicit request => implicit val user = request.user - extractorService.getExtractorInfo(extractorName) match { + extractorService.getExtractorInfo(extractorName, extractor_key, user) match { case None => NotFound(s"No extractor found with name=${extractorName}") case Some(info) => { val allExtractions = extractions.findAll() @@ -56,9 +56,10 @@ class Extractors @Inject() (extractions: ExtractionService, */ def selectExtractors() = AuthenticatedAction { implicit request => implicit val user = request.user - + val userid = request.user.map(u => Some(u.id)).getOrElse(None) // Filter extractors by user filters necessary - var runningExtractors: List[ExtractorInfo] = extractorService.listExtractorsInfo(List.empty) + // TODO: Filter by multiple spaces + var runningExtractors: List[ExtractorInfo] = extractorService.listExtractorsInfo(List.empty, userid) val selectedExtractors: List[String] = extractorService.getEnabledExtractors() val groups = extractions.groupByType(extractions.findAll()) val allLabels = extractorService.listExtractorsLabels() @@ -166,7 +167,7 @@ class Extractors @Inject() (extractions: ExtractionService, def manageLabels = ServerAdminAction { implicit request => implicit val user = request.user val categories = List[String]("EXTRACT") - val extractors = extractorService.listExtractorsInfo(categories) + val extractors = extractorService.listExtractorsInfo(categories, None) val labels = extractorService.listExtractorsLabels() Ok(views.html.extractorLabels(labels, extractors)) @@ -211,7 +212,8 @@ class Extractors @Inject() (extractions: ExtractionService, def showExtractorInfo(extractorName: String) = AuthenticatedAction { implicit request => implicit val user = request.user - val targetExtractor = extractorService.listExtractorsInfo(List.empty).find(p => p.name == 
extractorName) + val userid = request.user.map(u => Some(u.id)).getOrElse(None) + val targetExtractor = extractorService.listExtractorsInfo(List.empty, userid).find(p => p.name == extractorName) targetExtractor match { case Some(extractor) => { val labels = extractorService.getLabelsForExtractor(extractor.name) @@ -223,6 +225,7 @@ class Extractors @Inject() (extractions: ExtractionService, def showExtractorMetrics(extractorName: String) = AuthenticatedAction { implicit request => implicit val user = request.user + val userid = request.user.map(u => Some(u.id)).getOrElse(None) val dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS") val todaydate = dateFormatter.format(new java.util.Date()) @@ -299,7 +302,7 @@ class Extractors @Inject() (extractions: ExtractionService, } Logger.warn("last 10 average: " + lastTenAverage) - val targetExtractor = extractorService.listExtractorsInfo(List.empty).find(p => p.name == extractorName) + val targetExtractor = extractorService.listExtractorsInfo(List.empty, userid).find(p => p.name == extractorName) targetExtractor match { case Some(extractor) => Ok(views.html.extractorMetrics(extractorName, average.toString, lastTenAverage.toString, lastweeksubmitted, lastmonthsubmitted)) case None => InternalServerError("Extractor Info not found: " + extractorName) @@ -308,11 +311,19 @@ class Extractors @Inject() (extractions: ExtractionService, def submitFileExtraction(file_id: UUID) = PermissionAction(Permission.EditFile, Some(ResourceRef(ResourceRef.file, file_id))) { implicit request => implicit val user = request.user - val all_extractors = extractorService.listExtractorsInfo(List("EXTRACT", "CONVERT")) - val extractors = all_extractors.filter(!_.process.file.isEmpty) + val userid = request.user.map(u => Some(u.id)).getOrElse(None) fileService.get(file_id) match { - case Some(file) => { + val all_extractors = extractorService.listExtractorsInfo(List("EXTRACT", "CONVERT"), userid) + var extractors = 
all_extractors.filter(!_.process.file.isEmpty) + + val user_extra = userid match { + case Some(uid) => all_extractors.filter(_.permissions.contains(ResourceRef('user, uid))) + case None => List.empty + } + + extractors = (extractors ++ user_extra).distinct + val foldersContainingFile = folders.findByFileId(file.id).sortBy(_.name) var folderHierarchy = new ListBuffer[Folder]() if(foldersContainingFile.length > 0) { @@ -352,7 +363,8 @@ class Extractors @Inject() (extractions: ExtractionService, def submitSelectedExtractions(ds_id: UUID) = PermissionAction(Permission.EditDataset, Some(ResourceRef(ResourceRef.dataset, ds_id))) { implicit request => implicit val user = request.user - val all_extractors = extractorService.listExtractorsInfo(List("EXTRACT", "CONVERT")) + val userid = request.user.map(u => Some(u.id)).getOrElse(None) + val all_extractors = extractorService.listExtractorsInfo(List("EXTRACT", "CONVERT"), userid) val extractors = all_extractors.filter(!_.process.file.isEmpty) datasets.get(ds_id) match { case Some(dataset) => { @@ -372,10 +384,13 @@ class Extractors @Inject() (extractions: ExtractionService, def submitDatasetExtraction(ds_id: UUID) = PermissionAction(Permission.EditDataset, Some(ResourceRef(ResourceRef.dataset, ds_id))) { implicit request => implicit val user = request.user - val all_extractors = extractorService.listExtractorsInfo(List("EXTRACT", "CONVERT")) - val extractors = all_extractors.filter(!_.process.dataset.isEmpty) + val userid = request.user.map(u => Some(u.id)).getOrElse(None) datasetService.get(ds_id) match { - case Some(ds) => Ok(views.html.extractions.submitDatasetExtraction(extractors, ds)) + case Some(ds) => { + val all_extractors = extractorService.listExtractorsInfo(List("EXTRACT", "CONVERT"), userid) + val extractors = all_extractors.filter(!_.process.dataset.isEmpty) + Ok(views.html.extractions.submitDatasetExtraction(extractors, ds)) + } case None => InternalServerError("Dataset not found") } } diff --git 
a/app/controllers/SecuredController.scala b/app/controllers/SecuredController.scala index 0cbedeec3..ce4dae8c6 100644 --- a/app/controllers/SecuredController.scala +++ b/app/controllers/SecuredController.scala @@ -104,6 +104,9 @@ trait SecuredController extends Controller { userRequest.user match { case Some(u) if !AppConfiguration.acceptedTermsOfServices(u.termsOfServices) => Future.successful(Results.Redirect(routes.Application.tos(Some(request.uri)))) case Some(u) if (u.status==UserStatus.Inactive) => Future.successful(Results.Redirect(routes.Error.notActivated())) + case Some(u) if (u.status==UserStatus.ReadOnly && !api.Permission.READONLY.contains(permission) && permission != Permission.DownloadFiles) => { + Future.successful(Results.Redirect(routes.Error.notAuthorized("Account is ReadOnly", "", ""))) + } case Some(u) if u.superAdminMode || Permission.checkPermission(userRequest.user, permission, resourceRef) => block(userRequest) case Some(u) => notAuthorizedMessage(userRequest.user, resourceRef) case None if Permission.checkPermission(userRequest.user, permission, resourceRef) => block(userRequest) diff --git a/app/controllers/Spaces.scala b/app/controllers/Spaces.scala index a8388812f..1c197060a 100644 --- a/app/controllers/Spaces.scala +++ b/app/controllers/Spaces.scala @@ -83,10 +83,11 @@ class Spaces @Inject() (spaces: SpaceService, users: UserService, events: EventS def selectExtractors(id: UUID) = AuthenticatedAction { implicit request => implicit val user = request.user + val userid = request.user.map(u => Some(u.id)).getOrElse(None) spaces.get(id) match { case Some(s) => { // get list of registered extractors - val runningExtractors: List[ExtractorInfo] = extractors.listExtractorsInfo(List.empty) + val runningExtractors: List[ExtractorInfo] = extractors.listExtractorsInfo(List.empty, userid) // list of extractors enabled globally val globalSelections: List[String] = extractors.getEnabledExtractors() // get list of extractors registered with a 
specific space @@ -221,7 +222,7 @@ class Spaces @Inject() (spaces: SpaceService, users: UserService, events: EventS } } - def newSpace() = AuthenticatedAction { implicit request => + def newSpace() = PermissionAction(Permission.CreateSpace) { implicit request => implicit val user = request.user Ok(views.html.spaces.newSpace(spaceForm)) } @@ -395,10 +396,10 @@ class Spaces @Inject() (spaces: SpaceService, users: UserService, events: EventS * Submit action for new or edit space */ // TODO this should check to see if user has editspace for specific space - def submit() = AuthenticatedAction { implicit request => + def submit() = PermissionAction(Permission.CreateSpace) { implicit request => implicit val user = request.user user match { - case Some(identity) => { + case Some(identity) if identity.status != UserStatus.ReadOnly => { val userId = request.user.get.id //need to get the submitValue before binding form data, in case of errors we want to trigger different forms request.body.asMultipartFormData.get.dataParts.get("submitValue").headOption match { @@ -482,7 +483,7 @@ class Spaces @Inject() (spaces: SpaceService, users: UserService, events: EventS case None => { BadRequest("Did not get any submit button value.") } } } //some identity - case None => Redirect(routes.Spaces.list()).flashing("error" -> "You are not authorized to create/edit $spaceTitle.") + case _ => Redirect(routes.Spaces.list()).flashing("error" -> "You are not authorized to create/edit $spaceTitle.") } } def followingSpaces(index: Int, limit: Int, mode: String) = PrivateServerAction { implicit request => diff --git a/app/controllers/Utils.scala b/app/controllers/Utils.scala index 8eda878fb..2d599a78e 100644 --- a/app/controllers/Utils.scala +++ b/app/controllers/Utils.scala @@ -13,12 +13,13 @@ object Utils { /** * Return base url given a request. This will add http or https to the front, for example * https://localhost:9443 will be returned if it is using https. 
+ * */ - def baseUrl(request: Request[Any], absolute: Boolean = true) = { + def baseUrl(request: RequestHeader, absolute: Boolean = true) = { if (absolute) { - routes.Files.list().absoluteURL(https(request))(request).replace("/files", "") + routes.Files.list().absoluteURL(https(request))(request).replace("/files", "") } else { - routes.Files.list().url.replace("/files", "") + routes.Files.list().url.replace("/files", "") } } @@ -171,4 +172,4 @@ object Utils { decodedReplies.toList } } -} \ No newline at end of file +} diff --git a/app/models/Dataset.scala b/app/models/Dataset.scala index 11953197c..4c682963c 100644 --- a/app/models/Dataset.scala +++ b/app/models/Dataset.scala @@ -5,6 +5,7 @@ import java.util.Date import play.api.libs.json.{Writes, Json} import play.api.libs.json._ import play.api.libs.functional.syntax._ +import _root_.util.Formatters /** * A dataset is a collection of files, and streams. @@ -38,6 +39,53 @@ case class Dataset( def isDefault:Boolean = status == DatasetStatus.DEFAULT.toString def isTRIAL:Boolean = status == DatasetStatus.TRIAL.toString def inSpace:Boolean = spaces.size > 0 + + /** + * Caps a list at 'max' + * then turns it's ID's into resolvable URLs of that 'apiRoute' type + * end with appending "..." 
to the List, to signify that it was abridged + * + * todo: issue 354 to the max configurable + */ + def cap_api_list (l: List[UUID], max: Int, URLb: String, apiRoute: String) : List[String] = { + if (l.length <= max) { + return l.map(f => URLb + apiRoute + f) + } else { + val cl = l.take(max) + val r : List[String] = cl.map(f => URLb + apiRoute + f) + return r.::("...").reverse + } + } + + /** + * return Dataset as JsValue in jsonld format + */ + def to_jsonld(url: String) : JsValue = { + val so = JsObject(Seq("@vocab" -> JsString("https://schema.org/"))) + val URLb = url.replaceAll("/$", "") + var pic_id = thumbnail_id.getOrElse("") + if (pic_id != "") { + pic_id = URLb + pic_id + } else { + "" + } + val datasetLD = Json.obj( + "@context" -> so, + "identifier" -> id.toString, + "name" -> name, + "author" -> author.to_jsonld(), + "description" -> description, + "dateCreated" -> Formatters.iso8601(created), + "DigitalDocument" -> Json.toJson(cap_api_list(files, 10, URLb, "/files/")), + "Collection" -> Json.toJson(cap_api_list(spaces, 10, URLb, "/spaces/")), + "thumbnail" -> Json.toJson(pic_id), + "license" -> licenseData.to_jsonld(), + "dateModfied" -> Formatters.iso8601(lastModifiedDate), + "keywords" -> tags.map(x => x.to_json()), + "creator" -> Json.toJson(creators) + ) + return datasetLD + } } object DatasetStatus extends Enumeration { @@ -68,8 +116,9 @@ object Dataset { } + case class DatasetAccess( showAccess: Boolean = false, access: String = "N/A", accessOptions: List[String] = List.empty -) \ No newline at end of file +) diff --git a/app/models/Extraction.scala b/app/models/Extraction.scala index e6b35b503..d38fe3b34 100644 --- a/app/models/Extraction.scala +++ b/app/models/Extraction.scala @@ -78,6 +78,7 @@ case class ExtractorDetail( * * @param id id internal to the system * @param name lower case, no spaces, can use dashes + * @param uniqueName name+suffix to uniquely identify extractor for private use e.g. 
clowder.extractor.v2.johndoe123 * @param version the version, for example 1.3.5 * @param updated date when this information was last updated * @param description short description of what the extractor does @@ -117,7 +118,9 @@ case class ExtractorInfo( defaultLabels: List[String] = List[String](), process: ExtractorProcessTriggers = new ExtractorProcessTriggers(), categories: List[String] = List[String](ExtractorCategory.EXTRACT.toString), - parameters: JsValue = JsObject(Seq()) + parameters: JsValue = JsObject(Seq()), + unique_key: Option[String] = None, + permissions: List[ResourceRef] =List[ResourceRef]() ) /** what are the categories of the extractor? @@ -170,7 +173,9 @@ object ExtractorInfo { (JsPath \ "labels").read[List[String]].orElse(Reads.pure(List.empty)) and (JsPath \ "process").read[ExtractorProcessTriggers].orElse(Reads.pure(new ExtractorProcessTriggers())) and (JsPath \ "categories").read[List[String]].orElse(Reads.pure(List[String](ExtractorCategory.EXTRACT.toString))) and - (JsPath \ "parameters").read[JsValue].orElse(Reads.pure(JsObject(Seq()))) + (JsPath \ "parameters").read[JsValue].orElse(Reads.pure(JsObject(Seq()))) and + (JsPath \ "unique_key").read[Option[String]].orElse(Reads.pure(None)) and + (JsPath \ "permissions").read[List[ResourceRef]].orElse(Reads.pure(List.empty)) )(ExtractorInfo.apply _) } diff --git a/app/models/File.scala b/app/models/File.scala index badb5c58e..765fa7d2f 100644 --- a/app/models/File.scala +++ b/app/models/File.scala @@ -4,6 +4,8 @@ import java.util.Date import models.FileStatus.FileStatus import play.api.libs.json.{JsObject, Json, Writes} +import play.api.libs.json._ +import _root_.util.Formatters /** * Uploaded files. 
@@ -32,7 +34,33 @@ case class File( licenseData: LicenseData = new LicenseData(), followers: List[UUID] = List.empty, stats: Statistics = new Statistics(), - status: String = FileStatus.UNKNOWN.toString) // can't use enums in salat + status: String = FileStatus.UNKNOWN.toString) { // can't use enums in salat + /** + * return File as JsValue in jsonld format + */ + def to_jsonld() : JsValue = { + val so = JsObject(Seq("@vocab" -> JsString("https://schema.org/"))) + val fileLD = Json.obj( + "@context" -> so, + "identifier" -> id.toString, + "name" -> filename, + "author" -> author.to_jsonld(), + "isBasedOn" -> originalname, + "uploadDate" -> Formatters.iso8601(uploadDate), + "contentType" -> contentType, + "MenuSection" -> sections.map(x => x.to_jsonld()), + "keywords" -> tags.map(x => x.to_json()), + "thumbnail" -> Json.toJson(thumbnail_id.filterNot(_.isEmpty).getOrElse("")), + "description" -> description, + "license" -> licenseData.to_jsonld(), + "FollowAction" -> Json.toJson(followers), + "interactionStatistic" -> stats.to_jsonld, + "status" -> status + ) + return fileLD + } +} + // what is the status of the file object FileStatus extends Enumeration { diff --git a/app/models/LicenseData.scala b/app/models/LicenseData.scala index 6434b868c..5e7ec46df 100644 --- a/app/models/LicenseData.scala +++ b/app/models/LicenseData.scala @@ -2,6 +2,9 @@ package models import api.Permission +import play.api.libs.json._ + + /** * case class to handle specific license information. Currently attached to individual Datasets and Files. 
*/ @@ -41,5 +44,67 @@ case class LicenseData ( def isRightsOwner(aName: String) = { m_rightsHolder == aName } -} + /** + * Utility to return a url even if empty, but enough other attributes available to determine it + * this is repurposed from: + * function updateData(id, imageBase, sourceObject, authorName) + * in updateLicenseInfo.js line:88 + */ + def urlViaAttributes() : String = { + if (m_licenseUrl != "") return m_licenseUrl + var licenseUrl = m_licenseUrl; + if (m_licenseType == "license2") { + //No checkboxes selected + if (!m_ccAllowCommercial && !m_ccAllowDerivative && !m_ccRequireShareAlike) { + licenseUrl = "http://creativecommons.org/licenses/by-nc-nd/3.0/"; + } + //Only commercial selected + else if (m_ccAllowCommercial && !m_ccAllowDerivative && !m_ccRequireShareAlike) { + licenseUrl = "http://creativecommons.org/licenses/by-nd/3.0/"; + } + //Only remixing selected + else if (!m_ccAllowCommercial && m_ccAllowDerivative && !m_ccRequireShareAlike) { + licenseUrl = "http://creativecommons.org/licenses/by-nc/3.0/"; + } + //Remixing and Sharealike selected + else if (!m_ccAllowCommercial && m_ccAllowDerivative && m_ccRequireShareAlike) { + licenseUrl = "http://creativecommons.org/licenses/by-nc-sa/3.0/"; + } + //All checkboxes selected + else if (m_ccAllowCommercial && m_ccAllowDerivative && m_ccRequireShareAlike) { + licenseUrl = "http://creativecommons.org/licenses/by-sa/3.0/"; + } + //Commercial and Remixing selected + else if (m_ccAllowCommercial && m_ccAllowDerivative && !m_ccRequireShareAlike) { + licenseUrl = "http://creativecommons.org/licenses/by/3.0/"; + } + //else { rightsHolder = 'Creative Commons'; + // licenseText = 'Specific level info'; } + } + else if (m_licenseType == "license3") { + licenseUrl = "http://creativecommons.org/publicdomain/zero/1.0/"; + } + else { + licenseUrl = "https://dbpedia.org/page/All_rights_reserved"; + } + return licenseUrl + } + + /** + * Utility function, similar to a json Write, to return string version in 
json-ld format + * Should also return key + */ + def to_jsonld () : JsValue = { + val licURI = this.urlViaAttributes() //URI = URL except in one case: + val licURL = if (licURI != "https://dbpedia.org/page/All_rights_reserved") licURI + else "" + val licLD = JsObject(Seq( + "@id" -> JsString(licURI), + "URL" -> JsString(licURL), + "@type" -> JsString("license"), + "Text" -> JsString(m_licenseText) //added this DataType + )) + return licLD + } +} diff --git a/app/models/Section.scala b/app/models/Section.scala index c1793a1ed..59f40c85c 100644 --- a/app/models/Section.scala +++ b/app/models/Section.scala @@ -1,5 +1,7 @@ package models +import play.api.libs.json._ + /** * A portion of a file. * @@ -17,7 +19,12 @@ case class Section( metadataCount: Long = 0, @deprecated("use Metadata","since the use of jsonld") jsonldMetadata : List[Metadata]= List.empty, thumbnail_id: Option[String] = None, - tags: List[Tag] = List.empty) + tags: List[Tag] = List.empty) { + def to_jsonld() : JsValue = { + return Json.toJson(description) + } + } + case class Rectangle( x: Double, @@ -25,4 +32,4 @@ case class Rectangle( w: Double, h: Double) { override def toString() = f"x: $x%.2f, y: $y%.2f, width: $w%.2f, height: $h%.2f" -} \ No newline at end of file +} diff --git a/app/models/ServerStartTime.scala b/app/models/ServerStartTime.scala index 15d02a0d9..521a4e6d5 100644 --- a/app/models/ServerStartTime.scala +++ b/app/models/ServerStartTime.scala @@ -2,6 +2,7 @@ package models import java.util.Date + /** * Keeps track of server start time * Used in Global Object @@ -10,4 +11,4 @@ import java.util.Date object ServerStartTime { var startTime: Date=null -} \ No newline at end of file +} diff --git a/app/models/Statistic.scala b/app/models/Statistic.scala index bdc8c02da..40a19b02a 100644 --- a/app/models/Statistic.scala +++ b/app/models/Statistic.scala @@ -10,7 +10,12 @@ case class Statistics ( downloads: Int = 0, last_viewed: Option[Date] = None, last_downloaded: Option[Date] = None -) +) 
{ + def to_jsonld() : JsValue = { + return Json.toJson(views) + } + } + case class StatisticUser ( user_id: UUID, diff --git a/app/models/Tag.scala b/app/models/Tag.scala index ce3962d92..6e99c6aa1 100644 --- a/app/models/Tag.scala +++ b/app/models/Tag.scala @@ -2,6 +2,9 @@ package models import java.util.Date +import play.api.libs.json._ + + /** * Add and remove tags * @@ -11,4 +14,8 @@ case class Tag( name: String, userId: Option[String], extractor_id: Option[String], - created: Date) + created: Date) { + def to_json() : JsValue = { + return Json.toJson(name) + } + } diff --git a/app/models/User.scala b/app/models/User.scala index 3564e5253..d07fe8020 100644 --- a/app/models/User.scala +++ b/app/models/User.scala @@ -9,9 +9,11 @@ import play.api.libs.json.{JsObject, Json, Writes} import securesocial.core._ import services.AppConfiguration +import play.api.libs.json._ + object UserStatus extends Enumeration { type UserStatus = Value - val Inactive, Active, Admin = Value + val Inactive, Active, Admin, ReadOnly = Value } /** @@ -108,7 +110,31 @@ case class MiniUser( id: UUID, fullName: String, avatarURL: String, - email: Option[String]) + email: Option[String]) { + /** + * return MiniUser as string in jsonld format, w/fullName split into first and last + */ + def to_jsonld() : JsValue = { + var firstName = ""; + var lastName = ""; + if (fullName.split("\\w+").length > 1) { + lastName = fullName.substring(fullName.lastIndexOf(" ") + 1); + firstName = fullName.substring(0, fullName.lastIndexOf(' ')); + } else { + firstName = fullName; + } + val authorLD = JsObject(Seq( + "@type" -> JsString("Person"), + "name" -> JsString(fullName), + "givenName" -> JsString(firstName), + "familyName" -> JsString(lastName), + "email" -> JsString(email.getOrElse("")), + "image" -> JsString(avatarURL) + )) + return authorLD + } + } + case class ClowderUser( id: UUID = UUID.generate(), diff --git a/app/services/CollectionService.scala b/app/services/CollectionService.scala index 
5f3c3971e..3715921ee 100644 --- a/app/services/CollectionService.scala +++ b/app/services/CollectionService.scala @@ -252,4 +252,6 @@ trait CollectionService { def getMetrics(): Iterator[Collection] + def isInTrash(id: UUID): Boolean + } diff --git a/app/services/DatasetService.scala b/app/services/DatasetService.scala index 071a707bf..1949c3ae3 100644 --- a/app/services/DatasetService.scala +++ b/app/services/DatasetService.scala @@ -389,5 +389,7 @@ trait DatasetService { def getTrashedIds(): List[UUID] + def isInTrash(id: UUID): Boolean + def recursiveArchive(dataset: Dataset, host: String, parameters: JsObject, apiKey: Option[String], user: Option[User]) } diff --git a/app/services/ElasticsearchPlugin.scala b/app/services/ElasticsearchPlugin.scala index 25b7c8d9d..5888c09f7 100644 --- a/app/services/ElasticsearchPlugin.scala +++ b/app/services/ElasticsearchPlugin.scala @@ -228,12 +228,18 @@ class ElasticsearchPlugin(application: Application) extends Plugin { // Check permissions for each resource results.foreach(resource => { resource.resourceType match { - case ResourceRef.file => if (Permission.checkPermission(user, Permission.ViewFile, resource)) - filesFound += resource.id - case ResourceRef.dataset => if (Permission.checkPermission(user, Permission.ViewDataset, resource)) - datasetsFound += resource.id - case ResourceRef.collection => if (Permission.checkPermission(user, Permission.ViewDataset, resource)) - collectionsFound += resource.id + case ResourceRef.file => { + if (Permission.checkPermission(user, Permission.ViewFile, resource) && !files.isInTrash(resource.id)) + filesFound += resource.id + } + case ResourceRef.dataset => { + if (Permission.checkPermission(user, Permission.ViewDataset, resource) && !datasets.isInTrash(resource.id)) + datasetsFound += resource.id + } + case ResourceRef.collection => { + if (Permission.checkPermission(user, Permission.ViewDataset, resource) && !collections.isInTrash(resource.id)) + collectionsFound += resource.id + } 
case _ => {} } }) @@ -527,7 +533,7 @@ class ElasticsearchPlugin(application: Application) extends Plugin { case jv: JsArray => { builder.startArray(clean_k) jv.value.foreach(subv => { - builder.value(subv.toString.replace("\"","")) + builder.value(convertJsObjectToBuilder(builder, subv.asInstanceOf[JsObject])) }) builder.endArray() } @@ -598,11 +604,9 @@ class ElasticsearchPlugin(application: Application) extends Plugin { // Elasticsearch 2 does not allow periods in field names builder.startArray(k.toString.replace(".", "_")) v.value.foreach(jv => { - // Try to interpret numeric value from each String if possible - parseDouble(jv.toString) match { - case Some(d) => builder.value(d) - case None => builder.value(jv) - } + builder.startObject() + convertJsObjectToBuilder(builder, jv.asInstanceOf[JsObject]) + builder.endObject() }) builder.endArray() } diff --git a/app/services/ExtractorRoutingService.scala b/app/services/ExtractorRoutingService.scala index 545eb4194..4915bb35a 100644 --- a/app/services/ExtractorRoutingService.scala +++ b/app/services/ExtractorRoutingService.scala @@ -72,12 +72,12 @@ class ExtractorRoutingService { * @param resourceType the type of resource to check * @return filtered list of extractors */ - private def getMatchingExtractors(extractorIds: List[String], operation: String, resourceType: ResourceType.Value): List[String] = { + private def getMatchingExtractors(extractorIds: List[String], operation: String, resourceType: ResourceType.Value, user: Option[User] = None): List[String] = { val extractorsService = DI.injector.getInstance(classOf[ExtractorService]) extractorIds.flatMap(exId => - extractorsService.getExtractorInfo(exId)).filter(exInfo => - resourceType match { + extractorsService.getExtractorInfo(exId, None, None)).filter(exInfo => { + val processMatch = resourceType match { case ResourceType.dataset => containsOperation(exInfo.process.dataset, operation) case ResourceType.file => @@ -87,7 +87,17 @@ class ExtractorRoutingService { 
case _ => false } - ).map(_.name) + val permissionMatch = exInfo.unique_key match { + case Some(key) => { + user match { + case None => false // User must be provided for a key-protected extractor + case Some(u) => exInfo.permissions.contains(new ResourceRef('user,u.id)) + } + } + case None => true + } + processMatch && permissionMatch + }).map(_.name) } /** @@ -96,15 +106,15 @@ class ExtractorRoutingService { * @param operation The dataset operation requested. * @return A list of extractors IDs. */ - private def getSpaceExtractorsByOperation(dataset: Dataset, operation: String, resourceType: ResourceType.Value): (List[String], List[String]) = { + private def getSpaceExtractorsByOperation(dataset: Dataset, operation: String, resourceType: ResourceType.Value, user: Option[User] = None): (List[String], List[String]) = { val spacesService = DI.injector.getInstance(classOf[SpaceService]) var enabledExtractors = new ListBuffer[String]() var disabledExtractors = new ListBuffer[String]() dataset.spaces.map(space => { spacesService.getAllExtractors(space).foreach { extractors => - enabledExtractors.appendAll(getMatchingExtractors(extractors.enabled, operation, resourceType)) - disabledExtractors.appendAll(getMatchingExtractors(extractors.disabled, operation, resourceType)) + enabledExtractors.appendAll(getMatchingExtractors(extractors.enabled, operation, resourceType, user)) + disabledExtractors.appendAll(getMatchingExtractors(extractors.disabled, operation, resourceType, user)) } }) (enabledExtractors.toList, disabledExtractors.toList) @@ -145,7 +155,7 @@ class ExtractorRoutingService { * @param contentType the content type of the file in the case of a file * @return a set of unique rabbitmq queues */ - private def getQueues(dataset: Dataset, routingKey: String, contentType: String): Set[String] = { + private def getQueues(dataset: Dataset, routingKey: String, contentType: String, user: Option[User] = None): Set[String] = { val extractorsService = 
DI.injector.getInstance(classOf[ExtractorService]) // drop the first fragment from the routing key and replace characters to create operation id @@ -160,9 +170,9 @@ class ExtractorRoutingService { else return Set.empty[String] // get extractors enabled at the global level - val globalExtractors = getMatchingExtractors(extractorsService.getEnabledExtractors(), operation, resourceType) + val globalExtractors = getMatchingExtractors(extractorsService.getEnabledExtractors(), operation, resourceType, user) // get extractors enabled/disabled at the space level - val (enabledExtractors, disabledExtractors) = getSpaceExtractorsByOperation(dataset, operation, resourceType) + val (enabledExtractors, disabledExtractors) = getSpaceExtractorsByOperation(dataset, operation, resourceType, user) // get queues based on RabbitMQ bindings (old method). val queuesFromBindings = getQueuesFromBindings(routingKey) // take the union of queues so that we publish to a specific queue only once @@ -229,6 +239,7 @@ class ExtractorRoutingService { var jobId: Option[UUID] = None dataset match { case Some(d) => { + // TODO: Check private extractor behavior getQueues(d, routingKey, file.contentType).foreach { queue => val source = Entity(ResourceRef(ResourceRef.file, file.id), Some(file.contentType), sourceExtra) diff --git a/app/services/ExtractorService.scala b/app/services/ExtractorService.scala index 75acf38e1..d750eaf58 100644 --- a/app/services/ExtractorService.scala +++ b/app/services/ExtractorService.scala @@ -35,13 +35,13 @@ trait ExtractorService { def dropAllExtractorStatusCollection() - def listExtractorsInfo(categories: List[String]): List[ExtractorInfo] + def listExtractorsInfo(categories: List[String], user: Option[UUID]): List[ExtractorInfo] - def getExtractorInfo(extractorName: String): Option[ExtractorInfo] + def getExtractorInfo(extractorName: String, extractorKey: Option[String], user: Option[User]): Option[ExtractorInfo] def updateExtractorInfo(e: ExtractorInfo): 
Option[ExtractorInfo] - def deleteExtractor(extractorName: String) + def deleteExtractor(extractorName: String, extractorKey: Option[String]) def listExtractorsLabels(): List[ExtractorsLabel] diff --git a/app/services/FileService.scala b/app/services/FileService.scala index 9558e7daf..6d474379d 100644 --- a/app/services/FileService.scala +++ b/app/services/FileService.scala @@ -248,4 +248,6 @@ trait FileService { def getIterator(space: Option[String], since: Option[String], until: Option[String]): Iterator[File] + def isInTrash(id: UUID): Boolean + } diff --git a/app/services/KeycloakProvider.scala b/app/services/KeycloakProvider.scala new file mode 100644 index 000000000..dab72cda6 --- /dev/null +++ b/app/services/KeycloakProvider.scala @@ -0,0 +1,103 @@ +package services + +import play.api.libs.ws.WS +import play.api.{Application, Logger} +import play.api.libs.json.JsObject +import securesocial.core._ +import scala.collection.JavaConverters._ + + +/** + * A Keycloak OAuth2 Provider + */ +class KeycloakProvider(application: Application) extends OAuth2Provider(application) { + val Error = "error" + val Message = "message" + val Type = "type" + val Sub = "sub" + val Name = "name" + val GivenName = "given_name" + val FamilyName = "family_name" + // todo: picture wont work + val Picture = "picture" + val Email = "email" + val Groups = "groups" + + override def id = KeycloakProvider.Keycloak + + def fillProfile(user: SocialUser): SocialUser = { + val UserInfoApi = loadProperty("userinfoUrl").getOrElse(throwMissingPropertiesException()) + val accessToken = user.oAuth2Info.get.accessToken + val promise = WS.url(UserInfoApi.toString).withHeaders(("Authorization", "Bearer " + accessToken)).get() + + try { + val response = awaitResult(promise) + val me = response.json + Logger.debug("Got back from Keycloak : " + me.toString()) + (me \ Error).asOpt[JsObject] match { + case Some(error) => + val message = (error \ Message).as[String] + val errorType = ( error \ 
Type).as[String] + Logger.error("[securesocial] error retrieving profile information from Keycloak. Error type = %s, message = %s" + .format(errorType,message)) + throw new AuthenticationException() + case _ => + val userId = (me \ Sub).as[String] + val firstName = (me \ GivenName).asOpt[String] + val lastName = (me \ FamilyName).asOpt[String] + val fullName = (me \ Name).asOpt[String] + val avatarUrl = ( me \ Picture).asOpt[String] + val email = ( me \ Email).asOpt[String] + val groups = ( me \ Groups).asOpt[List[String]] + val roles = ( me \ "resource_access" \ "account" \ "roles").asOpt[List[String]] + (application.configuration.getList("securesocial.keycloak.groups"), groups) match { + case (Some(conf), Some(keycloak)) => { + val conflist = conf.unwrapped().asScala.toList + if (keycloak.intersect(conflist).isEmpty) { + throw new AuthenticationException() + } + } + case (Some(_), None) => throw new AuthenticationException() + case (None, _) => Logger.debug("[securesocial] No check needed for groups") + } + (application.configuration.getList("securesocial.keycloak.roles"), roles) match { + case (Some(conf), Some(keycloak)) => { + val conflist = conf.unwrapped().asScala.toList + if (keycloak.intersect(conflist).isEmpty) { + throw new AuthenticationException() + } + } + case (Some(_), None) => throw new AuthenticationException() + case (None, _) => Logger.debug("[securesocial] No check needed for roles") + } + user.copy( + identityId = IdentityId(userId, id), + firstName = firstName.getOrElse(""), + lastName = lastName.getOrElse(""), + fullName = fullName.getOrElse({ + if (firstName.isDefined && lastName.isDefined) { + firstName.get + " " + lastName.get + } else if (firstName.isDefined) { + firstName.get + } else if (lastName.isDefined) { + lastName.get + } else { + "" + } + }), + avatarUrl = avatarUrl, + email = email + ) + } + } catch { + case e: Exception => { + Logger.error( "[securesocial] error retrieving profile information from Keycloak", e) + throw new 
AuthenticationException() + } + } + } +} + +object KeycloakProvider { + val Keycloak = "keycloak" +} diff --git a/app/services/MessageService.scala b/app/services/MessageService.scala index 5eb95d557..c6eda512a 100644 --- a/app/services/MessageService.scala +++ b/app/services/MessageService.scala @@ -298,6 +298,7 @@ class EventFilter(channel: Channel, queue: String) extends Actor { * @param queue */ class ExtractorsHeartbeats(channel: Channel, queue: String) extends Actor { + val users: UserService = DI.injector.getInstance(classOf[UserService]) val extractions: ExtractionService = DI.injector.getInstance(classOf[ExtractionService]) val extractorsService: ExtractorService = DI.injector.getInstance(classOf[ExtractorService]) @@ -305,6 +306,7 @@ class ExtractorsHeartbeats(channel: Channel, queue: String) extends Actor { case statusBody: String => Logger.debug("Received extractor heartbeat: " + statusBody) val json = Json.parse(statusBody) + Logger.debug(json.toString) // TODO store running extractors ids val id = UUID((json \ "id").as[String]) val queue = (json \ "queue").as[String] @@ -313,52 +315,78 @@ class ExtractorsHeartbeats(channel: Channel, queue: String) extends Actor { // Validate document val extractionInfoResult = extractor_info.validate[ExtractorInfo] + // Determine if there is a user associated with this request + val owner = (json \ "owner").as[String] + val user: Option[User] = if (owner.length > 0) { + users.findByEmail(owner) + } else { + None + } + // Update database extractionInfoResult.fold( - errors => { - Logger.debug("Received extractor heartbeat with bad format: " + extractor_info) - }, + errors => Logger.debug("Received extractor heartbeat with bad format: " + extractor_info), info => { - extractorsService.getExtractorInfo(info.name) match { - case Some(infoFromDB) => { - // TODO only update if new semantic version is greater than old semantic version - if (infoFromDB.version != info.version) { - // TODO keep older versions of extractor info 
instead of just the latest one - extractorsService.updateExtractorInfo(info) - Logger.info("Updated extractor definition for " + info.name) + if (info.unique_key.isDefined && user.isEmpty) { + Logger.error("Extractor keys must have a user associated with them.") + } else { + extractorsService.getExtractorInfo(info.name, info.unique_key, user) match { + case Some(infoFromDB) => { + if (info.unique_key.isDefined) { + // Retain existing permissions + val registrationInfo = info.unique_key match { + case Some(ek) => info.copy(permissions=infoFromDB.permissions) + case None => info + } + extractorsService.updateExtractorInfo(registrationInfo) + Logger.info(s"Updated private extractor definition for ${info.name} - ${info.unique_key}") + } else { + // TODO only update if new semantic version is greater than old semantic version + if (infoFromDB.version != info.version) { + // TODO keep older versions of extractor info instead of just the latest one + extractorsService.updateExtractorInfo(info) + Logger.info(s"Updated extractor definition for ${info.name}") + } + } } - } - case None => { - extractorsService.updateExtractorInfo(info) match { - case None => {} - case Some(eInfo) => { - // Create (if needed) and assign default labels - eInfo.defaultLabels.foreach(labelStr => { - val segments = labelStr.split("/") - val (labelName, labelCategory) = if (segments.length > 1) { - (segments(1), segments(0)) - } else { - (segments(0), "Other") - } - extractorsService.getExtractorsLabel(labelName) match { - case None => { - // Label does not exist - create and assign it - val createdLabel = extractorsService.createExtractorsLabel(labelName, Some(labelCategory), List[String](eInfo.name)) + case None => { + // Inject user into permissions list if a key is given + val registrationInfo = info.unique_key match { + case Some(ek) => info.copy(permissions=List(ResourceRef('user, user.get.id))) + case None => info + } + extractorsService.updateExtractorInfo(registrationInfo) match { + case 
None => {} + case Some(eInfo) => { + // Create (if needed) and assign default labels + eInfo.defaultLabels.foreach(labelStr => { + val segments = labelStr.split("/") + val (labelName, labelCategory) = if (segments.length > 1) { + (segments(1), segments(0)) + } else { + (segments(0), "Other") } - case Some(lbl) => { - // Label already exists, assign it - if (!lbl.extractors.contains(eInfo.name)) { - val label = ExtractorsLabel(lbl.id, lbl.name, lbl.category, lbl.extractors ++ List[String](eInfo.name)) - val updatedLabel = extractorsService.updateExtractorsLabel(label) + extractorsService.getExtractorsLabel(labelName) match { + case None => { + // Label does not exist - create and assign it + val createdLabel = extractorsService.createExtractorsLabel(labelName, Some(labelCategory), List[String](eInfo.name)) + } + case Some(lbl) => { + // Label already exists, assign it + if (!lbl.extractors.contains(eInfo.name)) { + val label = ExtractorsLabel(lbl.id, lbl.name, lbl.category, lbl.extractors ++ List[String](eInfo.name)) + val updatedLabel = extractorsService.updateExtractorsLabel(label) + } } } - } - }) + }) + } } - } - Logger.info(s"New extractor ${info.name} registered from heartbeat") + Logger.info(s"New extractor ${info.name} registered from heartbeat with key "+info.unique_key.toString) + } } + } } ) diff --git a/app/services/mongodb/MongoDBCollectionService.scala b/app/services/mongodb/MongoDBCollectionService.scala index 6c9c58b70..340e86a8b 100644 --- a/app/services/mongodb/MongoDBCollectionService.scala +++ b/app/services/mongodb/MongoDBCollectionService.scala @@ -1116,6 +1116,10 @@ class MongoDBCollectionService @Inject() ( Collection.find(MongoDBObject("trash" -> false)).toIterator } + def isInTrash(id: UUID): Boolean = { + Collection.findOne(MongoDBObject("trash" -> true, "_id" -> new ObjectId(id.stringify))).isDefined + } + private def isSubCollectionIdInCollection(subCollectionId: UUID, collection: Collection) : Boolean = { if 
(collection.child_collection_ids.contains(subCollectionId)){ return true diff --git a/app/services/mongodb/MongoDBDatasetService.scala b/app/services/mongodb/MongoDBDatasetService.scala index 2f456f7c1..a02677c23 100644 --- a/app/services/mongodb/MongoDBDatasetService.scala +++ b/app/services/mongodb/MongoDBDatasetService.scala @@ -506,8 +506,10 @@ class MongoDBDatasetService @Inject() ( val found = Dataset.find(query).toList val notFound = ids.diff(found.map(_.id)) - if (notFound.length > 0) - Logger.error("Not all dataset IDs found for bulk get request") + if (notFound.length > 0) { + Logger.error("Not all dataset IDs found for [Dataset] bulk get request") + Logger.error("notfound=" + notFound.toString) + } return DBResult(found, notFound) } @@ -727,12 +729,12 @@ class MongoDBDatasetService @Inject() ( if (file.isInstanceOf[models.File]) { val theFile = file.asInstanceOf[models.File] if (!theFile.thumbnail_id.isEmpty) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> theFile.thumbnail_id.get), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> theFile.thumbnail_id.get, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) return } } } - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> None), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> None, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } case None => Logger.debug(s"Dataset $datasetId not found") } @@ -747,12 +749,12 @@ class MongoDBDatasetService @Inject() ( if (file.isInstanceOf[File]) { val theFile = file.asInstanceOf[File] if (!theFile.thumbnail_id.isEmpty) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> theFile.thumbnail_id.get), false, false, 
WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> theFile.thumbnail_id.get, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) return } } } - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> None), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> None, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } case None => Logger.debug("No dataset found with id " + datasetId) } @@ -927,13 +929,13 @@ class MongoDBDatasetService @Inject() ( val md = JSON.parse(json).asInstanceOf[DBObject] Dataset.dao.collection.findOne(MongoDBObject("_id" -> new ObjectId(id.stringify)), MongoDBObject("metadata" -> 1)) match { case None => { - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("metadata" -> md), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("metadata" -> md, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } case Some(x) => { x.getAs[DBObject]("metadata") match { case Some(map) => { val union = map.asInstanceOf[DBObject] ++ md - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("metadata" -> union), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("metadata" -> union, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } case None => Map.empty } @@ -945,23 +947,25 @@ class MongoDBDatasetService @Inject() ( Logger.debug("Adding XML metadata to dataset " + id + " from file " + fileId + ": " + json) val md = JsonUtil.parseJSON(json).asInstanceOf[java.util.LinkedHashMap[String, Any]].toMap Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $addToSet("datasetXmlMetadata" -> DatasetXMLMetadata.toDBObject(models.DatasetXMLMetadata(md, 
fileId.stringify))), false, false, WriteConcern.Safe) + $addToSet("datasetXmlMetadata" -> DatasetXMLMetadata.toDBObject(models.DatasetXMLMetadata(md, fileId.stringify))) + ++ $set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def removeXMLMetadata(id: UUID, fileId: UUID) { Logger.debug("Removing XML metadata belonging to file " + fileId + " from dataset " + id + ".") - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $pull("datasetXmlMetadata" -> MongoDBObject("fileId" -> fileId.stringify)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $pull("datasetXmlMetadata" -> MongoDBObject("fileId" -> fileId.stringify)) + ++ $set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def addUserMetadata(id: UUID, json: String) { Logger.debug("Adding/modifying user metadata to dataset " + id + " : " + json) val md = com.mongodb.util.JSON.parse(json).asInstanceOf[DBObject] - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("userMetadata" -> md), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("userMetadata" -> md, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } /** Change the metadataCount field for a dataset */ def incrementMetadataCount(id: UUID, count: Long) = { - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $inc("metadataCount" -> count), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $inc("metadataCount" -> count) ++ $set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } /** @@ -969,20 +973,20 @@ class MongoDBDatasetService @Inject() ( */ def updateInformation(id: UUID, description: String, name: String) { val result = Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $set("description" -> description, "name" -> name), + $set("description" -> 
description, "name" -> name, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def updateName(id: UUID, name: String) { events.updateObjectName(id, name) val result = Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $set("name" -> name), + $set("name" -> name, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def updateDescription(id: UUID, description: String){ val result = Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $set("description" -> description), + $set("description" -> description, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } @@ -993,7 +997,7 @@ class MongoDBDatasetService @Inject() ( //Don't allow duplicates if (Dataset.dao.find(MongoDBObject("_id" -> new ObjectId(id.stringify)) ++ MongoDBObject("creators" -> creator)).length == 0) { val result = Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $push("creators" -> creator), + $push("creators" -> creator) ++ $set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } } @@ -1003,7 +1007,7 @@ class MongoDBDatasetService @Inject() ( */ def removeCreator(id: UUID, creator: String) { Dataset.dao.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $pull("creators" -> creator), false, false, WriteConcern.Safe) + $pull("creators" -> creator) ++ $set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } /** @@ -1014,8 +1018,8 @@ class MongoDBDatasetService @Inject() ( if (Dataset.dao.find(MongoDBObject("_id" -> new ObjectId(id.stringify)) ++ MongoDBObject("creators" -> creator)).length != 0) { removeCreator(id, creator); Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $push("creators" -> MongoDBObject("$each" -> MongoDBList(creator), "$position" -> position)), - false, false, WriteConcern.Safe) + $push("creators" -> MongoDBObject("$each" -> MongoDBList(creator), "$position" -> position)) + ++ $set("lastModifiedDate" -> new 
Date()),false, false, WriteConcern.Safe) } } @@ -1030,7 +1034,7 @@ class MongoDBDatasetService @Inject() ( def updateLicense(id: UUID, licenseType: String, rightsHolder: String, licenseText: String, licenseUrl: String, allowDownload: String) { val licenseData = models.LicenseData(m_licenseType = licenseType, m_rightsHolder = rightsHolder, m_licenseText = licenseText, m_licenseUrl = licenseUrl, m_allowDownload = allowDownload.toBoolean) val result = Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $set("licenseData" -> LicenseData.toDBObject(licenseData)), + $set("licenseData" -> LicenseData.toDBObject(licenseData), "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } @@ -1054,14 +1058,14 @@ class MongoDBDatasetService @Inject() ( if (!existingTags.contains(shortTag)) { val tagObj = models.Tag(name = shortTag, userId = userIdStr, extractor_id = eid, created = createdDate) tagsAdded += tagObj - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $addToSet("tags" -> Tag.toDBObject(tagObj)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $addToSet("tags" -> Tag.toDBObject(tagObj)) ++ $set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } }) tagsAdded.toList } def setUserMetadataWasModified(id: UUID, wasModified: Boolean) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("userMetadataWasModified" -> Some(wasModified)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("userMetadataWasModified" -> Some(wasModified), "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def findMetadataChangedDatasets(): List[Dataset] = { @@ -1081,7 +1085,8 @@ class MongoDBDatasetService @Inject() ( def removeTag(id: UUID, tagId: UUID) { Logger.debug("Removing tag " + tagId) - val result = Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), 
$pull("tags" -> MongoDBObject("_id" -> new ObjectId(tagId.stringify))), false, false, WriteConcern.Safe) + val result = Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $pull("tags" -> MongoDBObject("_id" -> new ObjectId(tagId.stringify))) + ++ $set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def removeTags(id: UUID, tags: List[String]) { @@ -1092,12 +1097,13 @@ class MongoDBDatasetService @Inject() ( // Only remove existing tags. tags.intersect(existingTags).map { tag => - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $pull("tags" -> MongoDBObject("name" -> tag)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $pull("tags" -> MongoDBObject("name" -> tag)) + ++ $set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } } def removeAllTags(id: UUID) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("tags" -> List()), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), $set("tags" -> List(), "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } // ---------- Tags related code ends ------------------ @@ -1347,28 +1353,28 @@ class MongoDBDatasetService @Inject() ( } def addFile(datasetId: UUID, file: File) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $addToSet("files" -> new ObjectId(file.id.stringify)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $addToSet("files" -> new ObjectId(file.id.stringify)) ++$set("lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def addFolder(datasetId: UUID, folderId: UUID) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $addToSet("folders" -> new ObjectId(folderId.stringify)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new 
ObjectId(datasetId.stringify)), $addToSet("folders" -> new ObjectId(folderId.stringify)) ++$set( "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def addCollection(datasetId: UUID, collectionId: UUID) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $addToSet("collections" -> new ObjectId(collectionId.stringify)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $addToSet("collections" -> new ObjectId(collectionId.stringify)) ++$set( "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def removeCollection(datasetId: UUID, collectionId: UUID) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $pull("collections" -> new ObjectId(collectionId.stringify)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $pull("collections" -> new ObjectId(collectionId.stringify)) ++$set( "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def removeFile(datasetId: UUID, fileId: UUID) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $pull("files" -> new ObjectId(fileId.stringify)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $pull("files" -> new ObjectId(fileId.stringify)) ++$set( "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) removeXMLMetadata(datasetId, fileId) } def removeFolder(datasetId: UUID, folderId: UUID) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $pull("folders" -> new ObjectId(folderId.stringify)), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $pull("folders" -> new ObjectId(folderId.stringify)) ++$set( "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } def newThumbnail(datasetId: UUID) { @@ -1379,12 +1385,12 @@ class 
MongoDBDatasetService @Inject() ( if (file.isInstanceOf[models.File]) { val theFile = file.asInstanceOf[models.File] if (!theFile.thumbnail_id.isEmpty) { - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> theFile.thumbnail_id.get), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> theFile.thumbnail_id.get, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) return } } } - Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> None), false, false, WriteConcern.Safe) + Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), $set("thumbnail_id" -> None, "lastModifiedDate" -> new Date()), false, false, WriteConcern.Safe) } case None => } @@ -1450,11 +1456,11 @@ class MongoDBDatasetService @Inject() ( def addToSpace(datasetId: UUID, spaceId: UUID): Unit = { val result = Dataset.update( MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), - $addToSet("spaces" -> Some(new ObjectId(spaceId.stringify))), + $addToSet("spaces" -> Some(new ObjectId(spaceId.stringify))) ++$set( "lastModifiedDate" -> new Date()), false, false) if (get(datasetId).exists(_.isTRIAL == true) && spaces.get(spaceId).exists(_.isTrial == false)) { Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), - $set("status" -> DatasetStatus.DEFAULT.toString), + $set("status" -> DatasetStatus.DEFAULT.toString, "lastModifiedDate" -> new Date()), false, false) } } @@ -1462,7 +1468,7 @@ class MongoDBDatasetService @Inject() ( def removeFromSpace(datasetId: UUID, spaceId: UUID): Unit = { val result = Dataset.update( MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), - $pull("spaces" -> Some(new ObjectId(spaceId.stringify))), + $pull("spaces" -> Some(new ObjectId(spaceId.stringify))) ++$set("lastModifiedDate" -> new Date()), false, false) if 
(play.Play.application().configuration().getBoolean("verifySpaces")) { @@ -1470,7 +1476,7 @@ class MongoDBDatasetService @Inject() ( get(datasetId) match { case Some(d) if !d.spaces.map(s => spaces.get(s)).flatten.exists(_.isTrial == false) => Dataset.update(MongoDBObject("_id" -> new ObjectId(datasetId.stringify)), - $set("status" -> DatasetStatus.TRIAL.toString), + $set("status" -> DatasetStatus.TRIAL.toString, "lastModifiedDate" -> new Date()), false, false) case _ => } @@ -1644,13 +1650,13 @@ class MongoDBDatasetService @Inject() ( def incrementDownloads(id: UUID, user: Option[User]) = { Logger.debug("updating downloads for dataset "+id.toString) Dataset.update(MongoDBObject("_id" -> new ObjectId(id.stringify)), - $inc("stats.downloads" -> 1) ++ $set("stats.last_downloaded" -> new Date), true, false, WriteConcern.Safe) + $inc("stats.downloads" -> 1) ++ $set("stats.last_downloaded" -> new Date, "lastModifiedDate" -> new Date()), true, false, WriteConcern.Safe) user match { case Some(u) => { Logger.debug("updating downloads for user "+u.toString) DatasetStats.update(MongoDBObject("user_id" -> new ObjectId(u.id.stringify), "resource_id" -> new ObjectId(id.stringify), "resource_type" -> "dataset"), - $inc("downloads" -> 1) ++ $set("last_downloaded" -> new Date), true, false, WriteConcern.Safe) + $inc("downloads" -> 1) ++ $set("last_downloaded" -> new Date, "lastModifiedDate" -> new Date()), true, false, WriteConcern.Safe) } case None => {} } @@ -1680,6 +1686,10 @@ class MongoDBDatasetService @Inject() ( trashedIds.toList } + def isInTrash(id: UUID): Boolean = { + Dataset.findOne(MongoDBObject("trash" -> true, "_id" -> new ObjectId(id.stringify))).isDefined + } + /** * Recursively submit requests to archive or unarchive the contents of the given dataset. 
* NOTE: "parameters" includes "operation", which supports both archiving and unarchiving diff --git a/app/services/mongodb/MongoDBExtractorService.scala b/app/services/mongodb/MongoDBExtractorService.scala index 039d7df06..3938e34bb 100644 --- a/app/services/mongodb/MongoDBExtractorService.scala +++ b/app/services/mongodb/MongoDBExtractorService.scala @@ -1,6 +1,6 @@ package services.mongodb -import javax.inject.Singleton +import javax.inject.{Inject, Singleton} import com.mongodb.casbah.Imports._ import com.mongodb.casbah.WriteConcern import com.mongodb.casbah.commons.MongoDBObject @@ -12,11 +12,12 @@ import play.api.Play.current import play.api.libs.json.{JsArray, JsNumber, JsObject, JsString, JsValue, Json} import services._ import services.mongodb.MongoContext.context - import org.bson.types.ObjectId @Singleton -class MongoDBExtractorService extends ExtractorService { +class MongoDBExtractorService @Inject() ( + users: MongoDBUserService + ) extends ExtractorService { def getExtractorServerIPList() = { var listServersIPs = List[String]() @@ -169,51 +170,106 @@ class MongoDBExtractorService extends ExtractorService { } } - def listExtractorsInfo(categories: List[String]): List[ExtractorInfo] = { + def listExtractorsInfo(categories: List[String], user: Option[UUID]): List[ExtractorInfo] = { + Logger.info("listing: "+categories.toString) var list_queue = List[ExtractorInfo]() val allDocs = ExtractorInfoDAO.findAll().sort(orderBy = MongoDBObject("name" -> -1)) for (doc <- allDocs) { - // If no categories are specified, return all extractor names - var category_match = categories.isEmpty - if (!category_match) { + // If no filters are specified, return all extractor names + var filter_match = (categories.isEmpty && doc.permissions.isEmpty) + if (!filter_match) { // Otherwise check if any extractor categories overlap requested categories (force uppercase) + val user_match = user match { + case Some(u) => { + val rr = new ResourceRef('user, u) + 
doc.permissions.contains(rr) || doc.permissions.isEmpty + } + case None => doc.permissions.isEmpty // If no user filter in registered extractor, everyone can see + } val upper_categories = categories.map(cat => cat.toUpperCase) - category_match = doc.categories.intersect(upper_categories).length > 0 + val category_match = categories.length == 0 || doc.categories.intersect(upper_categories).length > 0 + filter_match = (category_match && user_match) } - if (category_match) + if (filter_match) list_queue = doc :: list_queue } list_queue } - def getExtractorInfo(extractorName: String): Option[ExtractorInfo] = { - ExtractorInfoDAO.findOne(MongoDBObject("name" -> extractorName)) + def getExtractorInfo(extractorName: String, extractorKey: Option[String], user: Option[User]): Option[ExtractorInfo] = { + extractorKey match { + case Some(ek) => { + user match { + case None => { + Logger.error("User authentication required to view extractor info with a unique key.") + None + } + case Some(u) => { + val userRef = new ResourceRef('user, u.id) + ExtractorInfoDAO.findOne(MongoDBObject("name" -> extractorName, "unique_key" -> ek, "permissions" -> userRef)) + } + } + } + case None => ExtractorInfoDAO.findOne(MongoDBObject("name" -> extractorName, "unique_key" -> MongoDBObject("$exists" -> false))) + } } def updateExtractorInfo(e: ExtractorInfo): Option[ExtractorInfo] = { - ExtractorInfoDAO.findOne(MongoDBObject("name" -> e.name)) match { - case Some(old) => { - val updated = e.copy(id = old.id) - ExtractorInfoDAO.update(MongoDBObject("name" -> e.name), updated, false, false, WriteConcern.Safe) - Some(updated) - } + // TODO: Make this account for version as well + e.unique_key match { case None => { - ExtractorInfoDAO.save(e) - Some(e) + ExtractorInfoDAO.findOne(MongoDBObject("name" -> e.name, "unique_key" -> MongoDBObject("$exists" -> false))) match { + case Some(old) => { + val updated = e.copy(id = old.id) + ExtractorInfoDAO.update(MongoDBObject("name" -> e.name, "unique_key" -> 
MongoDBObject("$exists" -> false)), updated, false, false, WriteConcern.Safe) + Some(updated) + } + case None => { + ExtractorInfoDAO.save(e) + Some(e) + } + } + } + case Some(ek) => { + ExtractorInfoDAO.findOne(MongoDBObject("name" -> e.name, "unique_key" -> ek)) match { + case Some(old) => { + val updated = e.copy(id = old.id) + ExtractorInfoDAO.update(MongoDBObject("name" -> e.name, "unique_key" -> ek), updated, false, false, WriteConcern.Safe) + Some(updated) + } + case None => { + ExtractorInfoDAO.save(e) + Some(e) + } + } } } } - def deleteExtractor(extractorName: String) { - ExtractorInfoDAO.findOne(MongoDBObject("name" -> extractorName)) match { - case Some(extractor) => { - ExtractorInfoDAO.remove(MongoDBObject("name" -> extractor.name)) + def deleteExtractor(extractorName: String, extractorKey: Option[String]) { + extractorKey match { + case Some(ek) => { + ExtractorInfoDAO.findOne(MongoDBObject("name" -> extractorName, "unique_key" -> ek)) match { + case Some(extractor) => { + ExtractorInfoDAO.remove(MongoDBObject("name" -> extractor.name, "unique_key" -> ek)) + } + case None => { + Logger.error(s"No extractor found with name ${extractorName} and key ${ek}") + } + } } case None => { - Logger.info("No extractor found with name: " + extractorName) + ExtractorInfoDAO.findOne(MongoDBObject("name" -> extractorName, "unique_key" -> MongoDBObject("$exists" -> false))) match { + case Some(extractor) => { + ExtractorInfoDAO.remove(MongoDBObject("name" -> extractor.name, "unique_key" -> MongoDBObject("$exists" -> false))) + } + case None => { + Logger.error("No extractor found with name: " + extractorName) + } + } } } } @@ -246,15 +302,11 @@ class MongoDBExtractorService extends ExtractorService { def getLabelsForExtractor(extractorName: String): List[ExtractorsLabel] = { var results = List[ExtractorsLabel]() - ExtractorInfoDAO.findOne(MongoDBObject("name"->extractorName)) match { - case Some(info) => { - ExtractorsLabelDAO.findAll().foreach(label => { - if 
(label.extractors.contains(extractorName)) { - results = results ++ List[ExtractorsLabel](label) - } - }) + ExtractorsLabelDAO.findAll().foreach(label => { + if (label.extractors.contains(extractorName) && !results.contains(label)) { + results = results ++ List[ExtractorsLabel](label) } - } + }) results } } diff --git a/app/services/mongodb/MongoDBFileService.scala b/app/services/mongodb/MongoDBFileService.scala index 52daf75ed..6d2766cf4 100644 --- a/app/services/mongodb/MongoDBFileService.scala +++ b/app/services/mongodb/MongoDBFileService.scala @@ -1237,6 +1237,15 @@ class MongoDBFileService @Inject() ( until.foreach(t => query = query ++ ("uploadDate" $lte Parsers.fromISO8601(t))) FileDAO.find(query) } + + def isInTrash(id: UUID): Boolean = { + var foundTrash = false + datasets.findByFileIdAllContain(id).foreach(ds => { + if (ds.trash) + foundTrash = true + }) + foundTrash + } } object FileDAO extends ModelCompanion[File, ObjectId] { diff --git a/app/services/mongodb/MongoDBFolderService.scala b/app/services/mongodb/MongoDBFolderService.scala index 5dba1668d..5a11e6ea3 100644 --- a/app/services/mongodb/MongoDBFolderService.scala +++ b/app/services/mongodb/MongoDBFolderService.scala @@ -38,8 +38,10 @@ class MongoDBFolderService @Inject() (files: FileService, datasets: DatasetServi val found = FolderDAO.find(query).toList val notFound = ids.diff(found.map(ds => ds.id)) - if (notFound.length > 0) - Logger.error("Not all dataset IDs found for bulk get request") + if (notFound.length > 0) { + Logger.error("Not all dataset IDs found for [Folder] bulk get request") + Logger.error("notfound=" + notFound.toString) + } return DBResult(found, notFound) } @@ -181,4 +183,4 @@ object FolderDAO extends ModelCompanion[Folder, ObjectId] { case None =>throw new RuntimeException("No MongoSalatPlugin"); case Some(x) => new SalatDAO[Folder, ObjectId](collection = x.collection("folders")){} } -} \ No newline at end of file +} diff --git 
a/app/services/mongodb/MongoDBSpaceService.scala b/app/services/mongodb/MongoDBSpaceService.scala index c7dd53292..ab036586e 100644 --- a/app/services/mongodb/MongoDBSpaceService.scala +++ b/app/services/mongodb/MongoDBSpaceService.scala @@ -429,7 +429,6 @@ class MongoDBSpaceService @Inject() ( datasets.get(dataset) match { case Some(x) => { val datasetBytes = datasets.getBytesForDataset(dataset) - datasets.addToSpace(dataset, space) ProjectSpaceDAO.update(MongoDBObject("_id" -> new ObjectId(space.stringify)), $inc("spaceBytes" -> -1 * datasetBytes), upsert=false, multi=false, WriteConcern.Safe) ProjectSpaceDAO.update(MongoDBObject("_id" -> new ObjectId(space.stringify)), $inc("fileCount" -> -1 * x.files.length), upsert=false, multi=false, WriteConcern.Safe) ProjectSpaceDAO.update(MongoDBObject("_id" -> new ObjectId(space.stringify)), $inc("datasetCount" -> -1), upsert=false, multi=false, WriteConcern.Safe) diff --git a/app/services/mongodb/MongoSalatPlugin.scala b/app/services/mongodb/MongoSalatPlugin.scala index 3a625ae6c..f754d2d01 100644 --- a/app/services/mongodb/MongoSalatPlugin.scala +++ b/app/services/mongodb/MongoSalatPlugin.scala @@ -431,7 +431,7 @@ class MongoSalatPlugin(app: Application) extends Plugin { // Change from User active and serverAdmin flags to single status updateMongo("change-to-user-status", updateToUserStatus) - + // Capture original filename from FRBR metadata supplied by SEAD Migrator updateMongo("populate-original-filename", updateOriginalFilename) @@ -450,9 +450,8 @@ class MongoSalatPlugin(app: Application) extends Plugin { // Updates extractors enabled and disabled in a space updateMongo("update-space-extractors-selection", updateSpaceExtractorsSelection) - // Adds space bytes to space - updateMongo(updateKey = "update-space-bytes", updateSpaceBytes) - updateMongo(updateKey = "update-space-files", updateSpaceFiles) + // Adds status information to space + updateMongo(updateKey = "update-space-status", updateSpaceStatus) } private def 
updateMongo(updateKey: String, block: () => Unit): Unit = { @@ -478,14 +477,14 @@ class MongoSalatPlugin(app: Application) extends Plugin { private def addDateMovedToTrashCollections() { val q = MongoDBObject() - val s = MongoDBObject("$set" -> MongoDBObject("dateMovedToTrash" -> None, "trash"->false)) - collection("collections").update(q,s, multi=true) + val s = MongoDBObject("$set" -> MongoDBObject("dateMovedToTrash" -> None, "trash" -> false)) + collection("collections").update(q, s, multi = true) } private def addDateMovedToTrashDatasets() { val q = MongoDBObject() - val s = MongoDBObject("$set" -> MongoDBObject("dateMovedToTrash" -> None, "trash"->false)) - collection("datasets").update(q,s, multi=true) + val s = MongoDBObject("$set" -> MongoDBObject("dateMovedToTrash" -> None, "trash" -> false)) + collection("datasets").update(q, s, multi = true) } private def updateMongoChangeUserType() { @@ -1223,8 +1222,8 @@ class MongoSalatPlugin(app: Application) extends Plugin { var lastId: ObjectId = null var lastCollection: String = null var count = 0 - collection("metadata").find().sort(MongoDBObject("attachedTo" -> 1)).foreach{d => - d.getAs[DBObject]("attachedTo").foreach{at => + collection("metadata").find().sort(MongoDBObject("attachedTo" -> 1)).foreach { d => + d.getAs[DBObject]("attachedTo").foreach { at => (at.getAs[ObjectId]("_id"), at.getAs[String]("resourceType")) match { case (Some(id), Some(coll)) => { if (id != lastId) { @@ -1311,29 +1310,29 @@ class MongoSalatPlugin(app: Application) extends Plugin { userpasses.foreach { user => (user.getAs[ObjectId]("_id"), user.getAs[String]("email"), user.getAsOrElse[DBObject]("identityId", new MongoDBObject()).getAs[String]("userId")) match { - case (Some(userId), Some(email), Some(username)) => { - try { - // Find if user exists with lowercase email already - val conflicts = collection("social.users").count(MongoDBObject( - "_id" -> MongoDBObject("$ne" -> userId), - "identityId" -> MongoDBObject("userId" -> 
username.toLowerCase, "providerId" -> "userpass"))) - - if (conflicts == 0) { - collection("social.users").update(MongoDBObject("_id" -> userId), - MongoDBObject("$set" -> MongoDBObject( - "email" -> email.toLowerCase, - "identityId" -> MongoDBObject("userId" -> username.toLowerCase, "providerId" -> "userpass"))), upsert = false, multi = true) - } else { - // If there's already an account with lowercase email, deactivate this account - collection("social.users").update(MongoDBObject("_id" -> userId), - MongoDBObject("$set" -> MongoDBObject("active" -> false)), upsert = false, multi = true) - } - } catch { - case e: BSONException => Logger.error("Unable to update email for user with id: " + user) + case (Some(userId), Some(email), Some(username)) => { + try { + // Find if user exists with lowercase email already + val conflicts = collection("social.users").count(MongoDBObject( + "_id" -> MongoDBObject("$ne" -> userId), + "identityId" -> MongoDBObject("userId" -> username.toLowerCase, "providerId" -> "userpass"))) + + if (conflicts == 0) { + collection("social.users").update(MongoDBObject("_id" -> userId), + MongoDBObject("$set" -> MongoDBObject( + "email" -> email.toLowerCase, + "identityId" -> MongoDBObject("userId" -> username.toLowerCase, "providerId" -> "userpass"))), upsert = false, multi = true) + } else { + // If there's already an account with lowercase email, deactivate this account + collection("social.users").update(MongoDBObject("_id" -> userId), + MongoDBObject("$set" -> MongoDBObject("active" -> false)), upsert = false, multi = true) } + } catch { + case e: BSONException => Logger.error("Unable to update email for user with id: " + user) } - case _ => Logger.error("Missing user fields when updating email case") } + case _ => Logger.error("Missing user fields when updating email case") + } } } @@ -1455,21 +1454,21 @@ class MongoSalatPlugin(app: Application) extends Plugin { } private def updateAvatarUrl() { - val q = MongoDBObject("avatarUrl" -> 
"^http://www.gravatar.com".r) + val q = MongoDBObject("avatarUrl" -> "^http://www.gravatar.com".r) collection("social.users").find(q).foreach { user => val avatar_url = user.getAsOrElse[String]("avatarUrl", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) user.put("avatarUrl", new_gravatar) } collection("social.users").save(user, WriteConcern.Safe) } - collection("events").find(MongoDBObject("user.avatarURL" -> "^http://www.gravatar.com".r)).foreach{ event => + collection("events").find(MongoDBObject("user.avatarURL" -> "^http://www.gravatar.com".r)).foreach { event => event.getAs[DBObject]("user") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) mini_user.put("avatarURL", new_gravatar) @@ -1480,7 +1479,7 @@ class MongoSalatPlugin(app: Application) extends Plugin { event.getAs[DBObject]("targetuser") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) mini_user.put("avatarURL", new_gravatar) @@ -1490,11 +1489,11 @@ class MongoSalatPlugin(app: Application) extends Plugin { } collection("events").save(event, WriteConcern.Safe) } - collection("collections").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r)).foreach{ c => + collection("collections").find(MongoDBObject("author.avatarURL" -> 
"^http://www.gravatar.com".r)).foreach { c => c.getAs[DBObject]("author") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) mini_user.put("avatarURL", new_gravatar) @@ -1504,11 +1503,11 @@ class MongoSalatPlugin(app: Application) extends Plugin { case _ => Logger.info("No miniuser associated with the collection ") } } - collection("datasets").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r)).foreach{ dataset => + collection("datasets").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r)).foreach { dataset => dataset.getAs[DBObject]("author") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) mini_user.put("avatarURL", new_gravatar) @@ -1519,11 +1518,11 @@ class MongoSalatPlugin(app: Application) extends Plugin { case _ => Logger.info("No miniuser associated with the dataset ") } } - collection("folders").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r))foreach{ folder => + collection("folders").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r)) foreach { folder => folder.getAs[DBObject]("author") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) 
mini_user.put("avatarURL", new_gravatar) @@ -1534,11 +1533,11 @@ class MongoSalatPlugin(app: Application) extends Plugin { case _ => Logger.info("No miniuser associated with the folder") } } - collection("uploads").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r))foreach{ file => + collection("uploads").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r)) foreach { file => file.getAs[DBObject]("author") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) mini_user.put("avatarURL", new_gravatar) @@ -1549,11 +1548,11 @@ class MongoSalatPlugin(app: Application) extends Plugin { case _ => Logger.info("No miniuser associated with the file") } } - collection("comments").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r))foreach{ comment => + collection("comments").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r)) foreach { comment => comment.getAs[DBObject]("author") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) mini_user.put("avatarURL", new_gravatar) @@ -1564,11 +1563,11 @@ class MongoSalatPlugin(app: Application) extends Plugin { case _ => Logger.info("No miniuser associated with the comment ") } } - collection("curationObjects").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r))foreach{ event => + collection("curationObjects").find(MongoDBObject("author.avatarURL" -> "^http://www.gravatar.com".r)) foreach { 
event => event.getAs[DBObject]("author") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) mini_user.put("avatarURL", new_gravatar) @@ -1579,26 +1578,26 @@ class MongoSalatPlugin(app: Application) extends Plugin { case _ => Logger.info("No miniuser associated with the curation Object ") } } - collection("metadata").find(MongoDBObject("creator.typeOfAgent" -> "cat:user", "creator.user.avatarURL" -> "^http://www.gravatar.com".r) )foreach{ metadata => + collection("metadata").find(MongoDBObject("creator.typeOfAgent" -> "cat:user", "creator.user.avatarURL" -> "^http://www.gravatar.com".r)) foreach { metadata => metadata.getAs[DBObject]("creator") match { case Some(creator) => { val typeOfAgent = creator.getAsOrElse("typeOfAgent", "") - if(typeOfAgent == "cat:user") { + if (typeOfAgent == "cat:user") { creator.getAs[DBObject]("user") match { case Some(mini_user) => { val avatar_url = mini_user.getAsOrElse("avatarURL", "") - if(avatar_url.indexOf("http://www.gravatar.com") == 0 ) { + if (avatar_url.indexOf("http://www.gravatar.com") == 0) { val index = avatar_url.lastIndexOf("/") val new_gravatar = "https://www.gravatar.com/avatar" + avatar_url.substring(index) mini_user.put("avatarURL", new_gravatar) } collection("metadata").save(metadata, WriteConcern.Safe) - } + } case _ => Logger.info("No miniuser associated with the curation Object ") + } } } - } case _ => Logger.info("No agent associated with the curation Object ") } } @@ -1620,13 +1619,13 @@ class MongoSalatPlugin(app: Application) extends Plugin { val path = content.getAsOrElse[String]("Upload Path", "") if (path.length > 0) { if (path.lastIndexOf("/") >= 0) { - Logger.info("Assigning name/: " + path.substring(path.lastIndexOf("/")+1) 
+ " from path " + path) + Logger.info("Assigning name/: " + path.substring(path.lastIndexOf("/") + 1) + " from path " + path) md.getAs[DBObject]("attachedTo") match { case Some(ref) => { - collection("uploads").update(MongoDBObject("_id" -> new ObjectId(ref.get("_id").toString())), - MongoDBObject("$set" -> MongoDBObject( - "originalname" -> path.substring(path.lastIndexOf("/")+1))), false, false, WriteConcern.Safe) - + collection("uploads").update(MongoDBObject("_id" -> new ObjectId(ref.get("_id").toString())), + MongoDBObject("$set" -> MongoDBObject( + "originalname" -> path.substring(path.lastIndexOf("/") + 1))), false, false, WriteConcern.Safe) + } case _ => Logger.info("Nope") } @@ -1659,16 +1658,16 @@ class MongoSalatPlugin(app: Application) extends Plugin { private def updateEditorRole(): Unit = { val query = MongoDBObject("name" -> "Editor") - collection("roles").find(query).foreach {role => + collection("roles").find(query).foreach { role => role.put("permissions", Permission.EDITOR_PERMISSIONS.map(_.toString).toSet) collection("roles").save(role, WriteConcern.Safe) } - collection("social.users").foreach{user => + collection("social.users").foreach { user => val userSpaceRoles = user.getAsOrElse[MongoDBList]("spaceandrole", MongoDBList.empty) - userSpaceRoles.foreach{ userSpaceRole => + userSpaceRoles.foreach { userSpaceRole => val tempUserSpace = userSpaceRole.asInstanceOf[BasicDBObject] val tempRole = tempUserSpace.get("role").asInstanceOf[BasicDBObject] - if(tempRole.get("name") == "Editor") { + if (tempRole.get("name") == "Editor") { tempRole.put("permissions", Permission.EDITOR_PERMISSIONS.map(_.toString).toSet) } } @@ -1692,33 +1691,34 @@ class MongoSalatPlugin(app: Application) extends Plugin { print("DONE") } - private def updateSpaceBytes(): Unit = { - val spaces = collection("spaces.projects").find().toList.foreach{ space => + private def updateSpaceStatus(): Unit = { + collection("spaces.projects").find().toList.foreach { space => var 
currentSpaceBytes: Long = 0 + var fileCount: Integer = 0 val spaceId = space.get("_id") val spaceDatasets = collection("datasets").find(MongoDBObject("spaces" -> spaceId)).toList - spaceDatasets.foreach{ spaceDataset => + spaceDatasets.foreach { spaceDataset => val datasetFileIds = spaceDataset.getAsOrElse[MongoDBList]("files", MongoDBList.empty) - datasetFileIds.foreach{ fileId => + fileCount += datasetFileIds.length + datasetFileIds.foreach { fileId => collection("uploads").findOne(MongoDBObject("_id" -> fileId)) match { case Some(file) => currentSpaceBytes += file.get("length").asInstanceOf[Long] case None => Logger.info(s"Could not find file ${fileId} in space ${spaceId}") } + val folders = collection("folders").find(MongoDBObject("parentDatasetId" -> spaceDataset.get("_id"))).toList + folders.foreach { folder => + val folderFileIds = folder.getAsOrElse[MongoDBList]("files", MongoDBList.empty) + fileCount += folderFileIds.length + folderFileIds.foreach { fileId => + collection("uploads").findOne(MongoDBObject("_id" -> fileId)) match { + case Some(file) => currentSpaceBytes += file.get("length").asInstanceOf[Long] + case None => Logger.info(s"Could not find file ${fileId} in space ${spaceId}") + } + } + } } } - collection("spaces.projects").update(MongoDBObject("_id" -> spaceId), $set("spaceBytes" -> currentSpaceBytes)) - } - } - - private def updateSpaceFiles(): Unit = { - collection("spaces.projects").find().toList.foreach{ space => - var fileCount: Integer = 0 - val spaceId = space.get("_id") - val spaceDatasets = collection("datasets").find(MongoDBObject("spaces" -> spaceId)).toList - spaceDatasets.foreach{ spaceDataset => - fileCount += spaceDataset.getAsOrElse[MongoDBList]("files", MongoDBList.empty).length - } - collection("spaces.projects").update(MongoDBObject("_id" -> spaceId), $set("fileCount" -> fileCount)) + collection("spaces.projects").update(MongoDBObject("_id" -> spaceId), $set("spaceBytes" -> currentSpaceBytes, "fileCount" -> fileCount)) } } } 
diff --git a/app/views/admin/users.scala.html b/app/views/admin/users.scala.html index a5b0ed626..046a47c1a 100644 --- a/app/views/admin/users.scala.html +++ b/app/views/admin/users.scala.html @@ -18,32 +18,32 @@ never } } - @if(user.fold("")(_.id.stringify) == u.id.stringify) { - @if(!(u.status==UserStatus.Inactive)) { - + @if(user.fold("")(_.id.stringify) == u.id.stringify || configAdmins.contains(u.email.getOrElse(""))) { + + } + @if(u.status==UserStatus.Inactive) { + } else { - + } - } else { - @if(!(u.status==UserStatus.Inactive)) { - + @if(u.status==UserStatus.Active) { + } else { - + } - } - @if(user.fold("")(_.id.stringify) == u.id.stringify) { - - } else { - @if(configAdmins.contains(u.email.getOrElse(""))) { - + @if(u.status==UserStatus.ReadOnly) { + } else { - @if(u.status==UserStatus.Admin) { - - } else { - - } + } - } + @if(u.status==UserStatus.Admin) { + + } else { + + } + } @@ -66,12 +66,11 @@

- + - - + @@ -95,12 +94,11 @@

FullnameFullname Email Provider Last LoginActiveAdminStatus
- + - - + @@ -124,12 +122,11 @@

FullnameFullname Email Provider Last LoginActiveAdminStatus
- + - - + @@ -140,6 +137,34 @@

+
+ +
+
+

FullnameFullname Email Provider Last LoginActiveAdminStatus
+ + + + + + + + + + + @users.filter(u => u.status==UserStatus.ReadOnly).map(printRow) + +
FullnameEmailProviderLast LoginStatus
+ + + + @@ -148,45 +173,17 @@


@@ -287,17 +290,26 @@