diff --git a/assessment-api/assessment-actors/src/main/scala/org/sunbird/actors/QuestionActor.scala b/assessment-api/assessment-actors/src/main/scala/org/sunbird/actors/QuestionActor.scala index 3f0ed87fc..ffefa8ad6 100644 --- a/assessment-api/assessment-actors/src/main/scala/org/sunbird/actors/QuestionActor.scala +++ b/assessment-api/assessment-actors/src/main/scala/org/sunbird/actors/QuestionActor.scala @@ -3,6 +3,7 @@ package org.sunbird.actors import org.apache.commons.lang3.StringUtils import org.sunbird.`object`.importer.{ImportConfig, ImportManager} import org.sunbird.actor.core.BaseActor +import org.sunbird.cache.impl.RedisCache import org.sunbird.common.dto.{Request, Response, ResponseHandler} import org.sunbird.common.{DateUtils, Platform} import org.sunbird.graph.OntologyEngineContext @@ -92,6 +93,7 @@ class QuestionActor @Inject()(implicit oec: OntologyEngineContext) extends BaseA def systemUpdate(request: Request): Future[Response] = { val identifier = request.getContext.get("identifier").asInstanceOf[String] RequestUtil.validateRequest(request) + RedisCache.delete(identifier) val readReq = new Request(request) val identifiers = new util.ArrayList[String](){{ add(identifier) diff --git a/assessment-api/assessment-actors/src/main/scala/org/sunbird/actors/QuestionSetActor.scala b/assessment-api/assessment-actors/src/main/scala/org/sunbird/actors/QuestionSetActor.scala index 7ce683869..721271348 100644 --- a/assessment-api/assessment-actors/src/main/scala/org/sunbird/actors/QuestionSetActor.scala +++ b/assessment-api/assessment-actors/src/main/scala/org/sunbird/actors/QuestionSetActor.scala @@ -1,9 +1,9 @@ package org.sunbird.actors import java.util - import javax.inject.Inject import org.apache.commons.collections4.CollectionUtils +import org.apache.commons.lang3.StringUtils import org.sunbird.`object`.importer.{ImportConfig, ImportManager} import org.sunbird.actor.core.BaseActor import org.sunbird.cache.impl.RedisCache @@ -12,10 +12,12 @@ import org.sunbird.common.dto.{Request, Response, ResponseHandler} import org.sunbird.graph.OntologyEngineContext import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.dac.model.Node +import org.sunbird.graph.utils.NodeUtil import org.sunbird.managers.HierarchyManager.hierarchyPrefix import org.sunbird.managers.{AssessmentManager, HierarchyManager, UpdateHierarchyManager} import org.sunbird.utils.RequestUtil +import scala.collection.JavaConverters import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} @@ -40,9 +42,21 @@ class QuestionSetActor @Inject()(implicit oec: OntologyEngineContext) extends Ba case "rejectQuestionSet" => reject(request) case "importQuestionSet" => importQuestionSet(request) case "systemUpdateQuestionSet" => systemUpdate(request) + case "listQuestionSet" => listQuestionSet(request) case _ => ERROR(request.getOperation) } + def listQuestionSet(request: Request): Future[Response] = { + RequestUtil.validateListRequest(request) + val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + request.getRequest.put("fields", fields) + DataNode.search(request).map(nodeList => { + val questionList = nodeList.map(node => { + NodeUtil.serialize(node, fields, node.getObjectType.toLowerCase.replace("Image", ""), request.getContext.get("version").asInstanceOf[String]) + }).asJava + ResponseHandler.OK.put("questionSets", 
questionList).put("count", questionList.size) + }) + } def update(request: Request): Future[Response] = { RequestUtil.restrictProperties(request) request.getRequest.put("identifier", request.getContext.get("identifier")) @@ -142,9 +156,11 @@ class QuestionSetActor @Inject()(implicit oec: OntologyEngineContext) extends Ba def systemUpdate(request: Request): Future[Response] = { val identifier = request.getContext.get("identifier").asInstanceOf[String] RequestUtil.validateRequest(request) - if(Platform.getBoolean("questionset.cache.enable", false)) + if(Platform.getBoolean("questionset.cache.enable", false)) { RedisCache.delete(hierarchyPrefix + identifier) + } + RedisCache.delete(identifier) val readReq = new Request(request) val identifiers = new util.ArrayList[String](){{ add(identifier) diff --git a/assessment-api/assessment-service/app/controllers/v4/QuestionSetController.scala b/assessment-api/assessment-service/app/controllers/v4/QuestionSetController.scala index a82f320ae..2eb127c77 100644 --- a/assessment-api/assessment-service/app/controllers/v4/QuestionSetController.scala +++ b/assessment-api/assessment-service/app/controllers/v4/QuestionSetController.scala @@ -2,12 +2,20 @@ package controllers.v4 import akka.actor.{ActorRef, ActorSystem} import controllers.BaseController +import org.apache.commons.lang3.StringUtils +import org.sunbird.common.dto.{Request, Response, ResponseHandler} +import org.sunbird.graph.nodes.DataNode +import org.sunbird.graph.utils.NodeUtil +import org.sunbird.utils.RequestUtil + import javax.inject.{Inject, Named} import play.api.mvc.ControllerComponents -import utils.{ActorNames, ApiId, QuestionSetOperations} +import utils.{ActorNames, ApiId, QuestionOperations, QuestionSetOperations} +import java.util +import scala.collection.JavaConverters import scala.collection.JavaConverters._ -import scala.concurrent.ExecutionContext +import scala.concurrent.{ExecutionContext, Future} class QuestionSetController @Inject()(@Named(ActorNames.QUESTION_SET_ACTOR) questionSetActor: ActorRef, cc: ControllerComponents, actorSystem: ActorSystem)(implicit exec: ExecutionContext) extends BaseController(cc) { @@ -25,6 +33,18 @@ class QuestionSetController @Inject()(@Named(ActorNames.QUESTION_SET_ACTOR) ques getResult(ApiId.CREATE_QUESTION_SET, questionSetActor, questionSetRequest) } + def list(fields: Option[String]) = Action.async { implicit request => + val headers = commonHeaders() + val body = requestBody() + val question = body.getOrDefault("search", new java.util.HashMap()).asInstanceOf[java.util.Map[String, Object]]; + question.putAll(headers) + question.put("fields", fields.getOrElse("")) + val questionSetRequest = getRequest(question, headers, QuestionSetOperations.listQuestionSet.toString) + questionSetRequest.put("identifiers", questionSetRequest.get("identifier")) + setRequestContext(questionSetRequest, version, objectType, schemaName) + getResult(ApiId.LIST_QUESTIONSET, questionSetActor, questionSetRequest) + } + def read(identifier: String, mode: Option[String], fields: Option[String]) = Action.async { implicit request => val headers = commonHeaders() val questionSet = new java.util.HashMap().asInstanceOf[java.util.Map[String, Object]] diff --git a/assessment-api/assessment-service/app/utils/ApiId.scala b/assessment-api/assessment-service/app/utils/ApiId.scala index d45e57597..a5479f271 100644 --- a/assessment-api/assessment-service/app/utils/ApiId.scala +++ b/assessment-api/assessment-service/app/utils/ApiId.scala @@ -24,6 +24,7 @@ object ApiId { val 
SYSTEM_UPDATE_QUESTION = "api.question.system.update" val LIST_QUESTIONS = "api.questions.list" val REJECT_QUESTION = "api.question.reject" + val LIST_QUESTIONSET = "api.questionset.list" //QuestionSet APIs val CREATE_QUESTION_SET = "api.questionset.create" diff --git a/assessment-api/assessment-service/app/utils/QuestionSetOperations.scala b/assessment-api/assessment-service/app/utils/QuestionSetOperations.scala index afcd22e2b..643205710 100644 --- a/assessment-api/assessment-service/app/utils/QuestionSetOperations.scala +++ b/assessment-api/assessment-service/app/utils/QuestionSetOperations.scala @@ -3,5 +3,5 @@ package utils object QuestionSetOperations extends Enumeration { val createQuestionSet, readQuestionSet, readPrivateQuestionSet, updateQuestionSet, reviewQuestionSet, publishQuestionSet, retireQuestionSet, addQuestion, removeQuestion, updateHierarchyQuestion, readHierarchyQuestion, - rejectQuestionSet, importQuestionSet, systemUpdateQuestionSet = Value + rejectQuestionSet, importQuestionSet, listQuestionSet,systemUpdateQuestionSet = Value } diff --git a/assessment-api/assessment-service/conf/routes b/assessment-api/assessment-service/conf/routes index e33c9c1da..716a12b76 100644 --- a/assessment-api/assessment-service/conf/routes +++ b/assessment-api/assessment-service/conf/routes @@ -38,4 +38,5 @@ PATCH /questionset/v4/hierarchy/update controllers.v4.QuestionSetC GET /questionset/v4/hierarchy/:identifier controllers.v4.QuestionSetController.getHierarchy(identifier:String, mode:Option[String]) POST /questionset/v4/reject/:identifier controllers.v4.QuestionSetController.reject(identifier:String) POST /questionset/v4/import controllers.v4.QuestionSetController.importQuestionSet() -PATCH /questionset/v4/system/update/:identifier controllers.v4.QuestionSetController.systemUpdate(identifier:String) \ No newline at end of file +PATCH /questionset/v4/system/update/:identifier controllers.v4.QuestionSetController.systemUpdate(identifier:String) +POST /questionset/v4/list controllers.v4.QuestionSetController.list(fields:Option[String]) diff --git a/build/assessment-service/Dockerfile b/build/assessment-service/Dockerfile index d3b28b2cd..e9f6ebcc1 100644 --- a/build/assessment-service/Dockerfile +++ b/build/assessment-service/Dockerfile @@ -9,6 +9,6 @@ USER sunbird COPY ./assessment-api/assessment-service/target/assessment-service-1.0-SNAPSHOT-dist.zip /home/sunbird/ RUN unzip /home/sunbird/assessment-service-1.0-SNAPSHOT-dist.zip -d /home/sunbird/ RUN rm /home/sunbird/assessment-service-1.0-SNAPSHOT-dist.zip -COPY --chown=sunbird ./schemas /home/sunbird/assessment-service-1.0-SNAPSHOT/schemas +COPY --chown=sunbird ./schema/schemas /home/sunbird/assessment-service-1.0-SNAPSHOT/schemas WORKDIR /home/sunbird/ CMD java -XX:+PrintFlagsFinal $JAVA_OPTIONS -cp '/home/sunbird/assessment-service-1.0-SNAPSHOT/lib/*' -Dconfig.file=/home/sunbird/assessment-service-1.0-SNAPSHOT/config/application.conf -Dlogger.file=/home/sunbird/assessment-service-1.0-SNAPSHOT/config/logback.xml play.core.server.ProdServerStart /home/sunbird/assessment-service-1.0-SNAPSHOT diff --git a/build/assessment-service/Jenkinsfile b/build/assessment-service/Jenkinsfile index ebc9f1a63..e848b98d1 100644 --- a/build/assessment-service/Jenkinsfile +++ b/build/assessment-service/Jenkinsfile @@ -27,6 +27,13 @@ node('build-slave') { print "Environment will be : ${env.NODE_ENV}" sh 'mvn clean install -DskipTests=true ' } + stage('schema-pull') { + + dir("${env.WORKSPACE}/schema") { + checkout scm: [$class: 'GitSCM', branches: 
[[name: schema_repo_branch]], extensions: [[$class: 'CloneOption', depth: 1, noTags: true, reference: '', shallow: true]], userRemoteConfigs: [[credentialsId: schema_repo_credentials, url: schema_repo_url]]] + + } + } stage('Package') { dir('assessment-api') { diff --git a/build/content-service/Dockerfile b/build/content-service/Dockerfile index 2e829d061..234423fdb 100644 --- a/build/content-service/Dockerfile +++ b/build/content-service/Dockerfile @@ -9,6 +9,6 @@ USER sunbird COPY ./content-api/content-service/target/content-service-1.0-SNAPSHOT-dist.zip /home/sunbird/ RUN unzip /home/sunbird/content-service-1.0-SNAPSHOT-dist.zip -d /home/sunbird/ RUN rm /home/sunbird/content-service-1.0-SNAPSHOT-dist.zip -COPY --chown=sunbird ./schemas /home/sunbird/content-service-1.0-SNAPSHOT/schemas +COPY --chown=sunbird ./schema/schemas /home/sunbird/content-service-1.0-SNAPSHOT/schemas WORKDIR /home/sunbird/ CMD java -XX:+PrintFlagsFinal $JAVA_OPTIONS -cp '/home/sunbird/content-service-1.0-SNAPSHOT/lib/*' -Dconfig.file=/home/sunbird/content-service-1.0-SNAPSHOT/config/application.conf -Dlogger.file=/home/sunbird/content-service-1.0-SNAPSHOT/config/logback.xml play.core.server.ProdServerStart /home/sunbird/content-service-1.0-SNAPSHOT diff --git a/build/content-service/Jenkinsfile b/build/content-service/Jenkinsfile index 6909c139c..fb4204f8a 100644 --- a/build/content-service/Jenkinsfile +++ b/build/content-service/Jenkinsfile @@ -28,7 +28,13 @@ node('build-slave') { sh 'mvn clean install -DskipTests=true ' } + stage('schema-pull') { + dir("${env.WORKSPACE}/schema") { + checkout scm: [$class: 'GitSCM', branches: [[name: schema_repo_branch]], extensions: [[$class: 'CloneOption', depth: 1, noTags: true, reference: '', shallow: true]], userRemoteConfigs: [[credentialsId: schema_repo_credentials, url: schema_repo_url]]] + + } + } stage('Package') { dir('content-api') { sh 'mvn play2:dist -pl content-service' @@ -47,4 +53,4 @@ node('build-slave') { currentBuild.result = "FAILURE" throw err } -} \ No newline at end of file +} diff --git a/build/search-service/Dockerfile b/build/search-service/Dockerfile index be9830e24..1ac8932b1 100644 --- a/build/search-service/Dockerfile +++ b/build/search-service/Dockerfile @@ -9,6 +9,6 @@ USER sunbird COPY ./search-api/search-service/target/search-service-1.0-SNAPSHOT-dist.zip /home/sunbird/ RUN unzip /home/sunbird/search-service-1.0-SNAPSHOT-dist.zip -d /home/sunbird/ RUN rm /home/sunbird/search-service-1.0-SNAPSHOT-dist.zip -COPY --chown=sunbird ./schemas /home/sunbird/search-service-1.0-SNAPSHOT/schemas +COPY --chown=sunbird ./schema/schemas /home/sunbird/search-service-1.0-SNAPSHOT/schemas WORKDIR /home/sunbird/ CMD java -XX:+PrintFlagsFinal $JAVA_OPTIONS -cp '/home/sunbird/search-service-1.0-SNAPSHOT/lib/*' -Dconfig.file=/home/sunbird/search-service-1.0-SNAPSHOT/config/application.conf -Dlogger.file=/home/sunbird/search-service-1.0-SNAPSHOT/config/logback.xml play.core.server.ProdServerStart /home/sunbird/search-service-1.0-SNAPSHOT diff --git a/build/search-service/Jenkinsfile b/build/search-service/Jenkinsfile index a565e7da3..24909a49a 100644 --- a/build/search-service/Jenkinsfile +++ b/build/search-service/Jenkinsfile @@ -28,6 +28,13 @@ node('build-slave') { sh 'mvn clean install -DskipTests=true ' } + stage('schema-pull') { + + dir("${env.WORKSPACE}/schema") { + checkout scm: [$class: 'GitSCM', branches: [[name: schema_repo_branch]], extensions: [[$class: 'CloneOption', depth: 1, noTags: true, reference: '', shallow: true]], userRemoteConfigs: 
[[credentialsId: schema_repo_credentials, url: schema_repo_url]]] + + } + } stage('Package') { dir('search-api') { diff --git a/build/taxonomy-service/Dockerfile b/build/taxonomy-service/Dockerfile index 89dae255c..ea9796631 100644 --- a/build/taxonomy-service/Dockerfile +++ b/build/taxonomy-service/Dockerfile @@ -9,6 +9,6 @@ USER sunbird COPY ./taxonomy-api/taxonomy-service/target/taxonomy-service-1.0-SNAPSHOT-dist.zip /home/sunbird/ RUN unzip /home/sunbird/taxonomy-service-1.0-SNAPSHOT-dist.zip -d /home/sunbird/ RUN rm /home/sunbird/taxonomy-service-1.0-SNAPSHOT-dist.zip -COPY --chown=sunbird ./schemas /home/sunbird/taxonomy-service-1.0-SNAPSHOT/schemas +COPY --chown=sunbird ./schema/schemas /home/sunbird/taxonomy-service-1.0-SNAPSHOT/schemas WORKDIR /home/sunbird/ CMD java -XX:+PrintFlagsFinal $JAVA_OPTIONS -cp '/home/sunbird/taxonomy-service-1.0-SNAPSHOT/lib/*' -Dconfig.file=/home/sunbird/taxonomy-service-1.0-SNAPSHOT/config/application.conf -Dlogger.file=/home/sunbird/taxonomy-service-1.0-SNAPSHOT/config/logback.xml play.core.server.ProdServerStart /home/sunbird/taxonomy-service-1.0-SNAPSHOT diff --git a/build/taxonomy-service/Jenkinsfile b/build/taxonomy-service/Jenkinsfile index 453a603f6..96ae351d9 100644 --- a/build/taxonomy-service/Jenkinsfile +++ b/build/taxonomy-service/Jenkinsfile @@ -28,6 +28,13 @@ node('build-slave') { sh 'mvn clean install -DskipTests=true ' } + stage('schema-pull') { + + dir("${env.WORKSPACE}/schema") { + checkout scm: [$class: 'GitSCM', branches: [[name: schema_repo_branch]], extensions: [[$class: 'CloneOption', depth: 1, noTags: true, reference: '', shallow: true]], userRemoteConfigs: [[credentialsId: schema_repo_credentials, url: schema_repo_url]]] + + } + } stage('Package') { dir('taxonomy-api') { diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ContentActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ContentActor.scala index 16ebcc64d..c545be727 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ContentActor.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/ContentActor.scala @@ -4,16 +4,20 @@ import java.util import java.util.concurrent.CompletionException import java.io.File import org.apache.commons.io.FilenameUtils + import javax.inject.Inject +import org.apache.commons.lang3.ObjectUtils import org.apache.commons.lang3.StringUtils +import org.apache.commons.collections4.{CollectionUtils, MapUtils} +import org.slf4j.{Logger, LoggerFactory} import org.sunbird.`object`.importer.{ImportConfig, ImportManager} import org.sunbird.actor.core.BaseActor import org.sunbird.cache.impl.RedisCache -import org.sunbird.content.util.{AcceptFlagManager, ContentConstants, CopyManager, DiscardManager, FlagManager, RetireManager} +import org.sunbird.content.util.{AcceptFlagManager, ContentConstants, CopyManager, DiscardManager, FlagManager, NotificationManager, RetireManager} import org.sunbird.cloudstore.StorageService -import org.sunbird.common.{ContentParams, Platform, Slug} +import org.sunbird.common.{ContentParams, JsonUtils, Platform, Slug} import org.sunbird.common.dto.{Request, Response, ResponseHandler} -import org.sunbird.common.exception.ClientException +import org.sunbird.common.exception.{ClientException, ResponseCode} import org.sunbird.content.dial.DIALManager import org.sunbird.content.review.mgr.ReviewManager import org.sunbird.util.RequestUtil @@ -25,7 +29,9 @@ import org.sunbird.graph.utils.NodeUtil import 
org.sunbird.managers.HierarchyManager import org.sunbird.managers.HierarchyManager.hierarchyPrefix -import scala.collection.JavaConverters +import java.time.{ZoneId, ZonedDateTime} +import java.time.format.DateTimeFormatter +import scala.collection.{JavaConverters, Map} import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} @@ -34,6 +40,7 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe implicit val ec: ExecutionContext = getContext().dispatcher private lazy val importConfig = getImportConfig() private lazy val importMgr = new ImportManager(importConfig) + private val logger: Logger = LoggerFactory.getLogger("ContentActor") override def onReceive(request: Request): Future[Response] = { request.getOperation match { @@ -53,6 +60,7 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe case "systemUpdate" => systemUpdate(request) case "reviewContent" => reviewContent(request) case "rejectContent" => rejectContent(request) + case "adminReadContent" => adminRead(request) case _ => ERROR(request.getOperation) } } @@ -60,6 +68,38 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe def create(request: Request): Future[Response] = { populateDefaultersForCreation(request) RequestUtil.restrictProperties(request) + val startDateTimeStr = request.getRequest.getOrDefault("startDateTime", "").asInstanceOf[String] + val endDateTimeStr = request.getRequest.getOrDefault("endDateTime", "").asInstanceOf[String] + if (StringUtils.isNotBlank(startDateTimeStr) && StringUtils.isNotBlank(endDateTimeStr)) { + try { + val inputUtcFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXX") + val outputIstFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ") + val istZoneId = ZoneId.of("Asia/Kolkata") + + val startDateTimeUtc = ZonedDateTime.parse(startDateTimeStr, inputUtcFormatter) + val startDateTimeIst = startDateTimeUtc.withZoneSameInstant(istZoneId) + val formattedStartDateTimeIst = startDateTimeIst.format(outputIstFormatter) + val startDateTimeEpochMillis = startDateTimeIst.toInstant.toEpochMilli + + request.getRequest.put("startDateTime", formattedStartDateTimeIst) + request.getRequest.put("startDateTimeInEpoch", startDateTimeEpochMillis.asInstanceOf[java.lang.Long]) + + val endDateTimeUtc = ZonedDateTime.parse(endDateTimeStr, inputUtcFormatter) + val endDateTimeIst = endDateTimeUtc.withZoneSameInstant(istZoneId) + val formattedEndDateTimeIst = endDateTimeIst.format(outputIstFormatter) + val endDateTimeEpochMillis = endDateTimeIst.toInstant.toEpochMilli + + request.getRequest.put("endDateTime", formattedEndDateTimeIst) + request.getRequest.put("endDateTimeInEpoch", endDateTimeEpochMillis.asInstanceOf[java.lang.Long]) + } catch { + case ex: Exception => + return Future.successful(ResponseHandler. 
+ ERROR(ResponseCode.CLIENT_ERROR, + "ERR_INVALID_DATE_FORMAT", + "startDateTime or endDateTime is not in the expected format yyyy-MM-dd'T'HH:mm:ss.SSSXX")) + } + } + request.getRequest.put("cqfVersion", System.currentTimeMillis().toString) DataNode.create(request, dataModifier).map(node => { ResponseHandler.OK.put("identifier", node.getIdentifier).put("node_id", node.getIdentifier) .put("versionKey", node.getMetadata.get("versionKey")) @@ -73,19 +113,35 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe DataNode.read(request).map(node => { val metadata: util.Map[String, AnyRef] = NodeUtil.serialize(node, fields, node.getObjectType.toLowerCase.replace("image", ""), request.getContext.get("version").asInstanceOf[String]) metadata.put("identifier", node.getIdentifier.replace(".img", "")) - val response: Response = ResponseHandler.OK - if (responseSchemaName.isEmpty) { - response.put("content", metadata) - } - else { - response.put(responseSchemaName, metadata) - } - if(!StringUtils.equalsIgnoreCase(metadata.get("visibility").asInstanceOf[String],"Private")) { - response - } - else { + if (StringUtils.equalsIgnoreCase(metadata.get("visibility").asInstanceOf[String],"Private")) { throw new ClientException("ERR_ACCESS_DENIED", "content visibility is private, hence access denied") } + var sa = metadata.get("secureSettings") + var securityAttribute : util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] + if(sa.isInstanceOf[String]) { + securityAttribute = JsonUtils.deserialize(sa.asInstanceOf[String], classOf[java.util.Map[String, AnyRef]]) + metadata.put("secureSettings", securityAttribute) + } else if (sa.isInstanceOf[util.Map[String, AnyRef]]) { + securityAttribute = metadata.getOrDefault("secureSettings", new util.HashMap[String, AnyRef]).asInstanceOf[util.Map[String, AnyRef]] + } + //var securityAttribute : util.Map[String, AnyRef] = metadata.getOrDefault("secureSettings", new util.HashMap[String, AnyRef]).asInstanceOf[util.Map[String, AnyRef]] + if (MapUtils.isNotEmpty(securityAttribute)) { + var orgList : util.ArrayList[String] = securityAttribute.getOrDefault("organisation", new util.ArrayList[String]).asInstanceOf[util.ArrayList[String]] + if (!CollectionUtils.isEmpty(orgList)) { + //Content should be read by unique org users only. 
+ var userChannelId : String = request.getRequest.getOrDefault("x-user-channel-id", "").asInstanceOf[String] + if (!orgList.contains(userChannelId)) { + throw new ClientException("ERR_ACCESS_DENIED", "User is not allowed to read this content.") + } + } + } + val response: Response = ResponseHandler.OK + if (responseSchemaName.isEmpty) { + response.put("content", metadata) + } else { + response.put(responseSchemaName, metadata) + } + response }) } @@ -114,11 +170,60 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe } def update(request: Request): Future[Response] = { + val startDateTimeStr = request.getRequest.getOrDefault("startDateTime", "").asInstanceOf[String] + val endDateTimeStr = request.getRequest.getOrDefault("endDateTime", "").asInstanceOf[String] + if (StringUtils.isNotBlank(startDateTimeStr) && StringUtils.isNotBlank(endDateTimeStr)) { + try { + val inputUtcFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXX") + val outputIstFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ") + val istZoneId = ZoneId.of("Asia/Kolkata") + + val startDateTimeUtc = ZonedDateTime.parse(startDateTimeStr, inputUtcFormatter) + val startDateTimeIst = startDateTimeUtc.withZoneSameInstant(istZoneId) + val formattedStartDateTimeIst = startDateTimeIst.format(outputIstFormatter) + val startDateTimeEpochMillis = startDateTimeIst.toInstant.toEpochMilli + + request.getRequest.put("startDateTime", formattedStartDateTimeIst) + request.getRequest.put("startDateTimeInEpoch", startDateTimeEpochMillis.asInstanceOf[java.lang.Long]) + + val endDateTimeUtc = ZonedDateTime.parse(endDateTimeStr, inputUtcFormatter) + val endDateTimeIst = endDateTimeUtc.withZoneSameInstant(istZoneId) + val formattedEndDateTimeIst = endDateTimeIst.format(outputIstFormatter) + val endDateTimeEpochMillis = endDateTimeIst.toInstant.toEpochMilli + + request.getRequest.put("endDateTime", formattedEndDateTimeIst) + request.getRequest.put("endDateTimeInEpoch", endDateTimeEpochMillis.asInstanceOf[java.lang.Long]) + } catch { + case ex: Exception => + return Future.successful(ResponseHandler. 
+ ERROR(ResponseCode.CLIENT_ERROR, + "ERR_INVALID_DATE_FORMAT", + "startDateTime or endDateTime is not in the expected format yyyy-MM-dd'T'HH:mm:ss.SSSXX")) + } + } populateDefaultersForUpdation(request) if (StringUtils.isBlank(request.getRequest.getOrDefault("versionKey", "").asInstanceOf[String])) throw new ClientException("ERR_INVALID_REQUEST", "Please Provide Version Key!") RequestUtil.restrictProperties(request) + val reviewStatus: String = request.getRequest.getOrDefault("reviewStatus", "").asInstanceOf[String] + if(reviewStatus == null || reviewStatus.isEmpty ) { + request.getRequest.put("cqfVersion", System.currentTimeMillis().toString) + } DataNode.update(request, dataModifier).map(node => { val identifier: String = node.getIdentifier.replace(".img", "") + if (request.getContext.getOrDefault("sendNotification", Boolean.box(false)).asInstanceOf[Boolean]) { + try { + NotificationManager.sendNotification( + "CONTENT_EDITED", + "UPDATE", + List(node.getMetadata.get("createdBy").asInstanceOf[String]), + node.getMetadata.get("name").asInstanceOf[String], + Map[String, Any]("id" -> node.getMetadata.get("identifier").asInstanceOf[String]) + ) + + } catch { + case e: Exception => logger.info("Error while sending notification ", e) + } + } ResponseHandler.OK.put("node_id", identifier).put("identifier", identifier) .put("versionKey", node.getMetadata.get("versionKey")) }) @@ -188,10 +293,30 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe request.getContext.put("schemaName", node.getObjectType.toLowerCase()) if (StringUtils.equalsAnyIgnoreCase("Processing", node.getMetadata.getOrDefault("status", "").asInstanceOf[String])) throw new ClientException("ERR_NODE_ACCESS_DENIED", "Review Operation Can't Be Applied On Node Under Processing State") - else ReviewManager.review(request, node) + else { + val response = ReviewManager.review(request, node) + try { + val reviewers = node.getMetadata.get("reviewerIDs") match { + case arr: Array[String] => arr.toList + case list: java.util.List[_] => list.asScala.toList.map(_.toString) + case other => throw new RuntimeException(s"Unexpected type for reviewerIDs: ${other.getClass}") + } + NotificationManager.sendNotification( + "CONTENT_REVIEW_REQUEST", + "ALERT", + reviewers, + node.getMetadata.get("name").asInstanceOf[String], + Map[String, Any]("id" -> node.getMetadata.get("identifier").asInstanceOf[String]) + ) + } catch { + case e: Exception => logger.info("Error while sending notification ", e) + } + response + } }).flatMap(f => f) } + def populateDefaultersForCreation(request: Request) = { setDefaultsBasedOnMimeType(request, ContentParams.create.name) setDefaultLicense(request) @@ -260,6 +385,7 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe val identifier = request.getContext.get("identifier").asInstanceOf[String] RequestUtil.validateRequest(request) RedisCache.delete(hierarchyPrefix + request.get("rootId")) + RedisCache.delete(identifier) val readReq = new Request(request) val identifiers = new util.ArrayList[String](){{ @@ -275,6 +401,17 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe else DataNode.systemUpdate(request, response,"", None) }).map(node => { + try { + NotificationManager.sendNotification( + "CONTENT_EDITED", + "UPDATE", + List(node.getMetadata.get("createdBy").asInstanceOf[String]), + node.getMetadata.get("name").asInstanceOf[String], + Map[String, Any]("id" -> node.getMetadata.get("identifier").asInstanceOf[String]) + ) + } 
catch { + case e: Exception => logger.info("Error while sending notification ", e) + } ResponseHandler.OK.put("identifier", identifier).put("status", "success") }) } @@ -300,9 +437,39 @@ class ContentActor @Inject() (implicit oec: OntologyEngineContext, ss: StorageSe RequestUtil.restrictProperties(request) DataNode.update(request).map(node => { val identifier: String = node.getIdentifier.replace(".img", "") + try { + if(node.getMetadata.containsKey("reviewerIDs")) { + NotificationManager.sendNotification( + "CONTENT_REJECTED", + "UPDATE", + List(node.getMetadata.get("createdBy").asInstanceOf[String]), + node.getMetadata.get("name").asInstanceOf[String], + Map[String, Any]("id" -> node.getMetadata.get("identifier").asInstanceOf[String]) + ) + } + } catch { + case e: Exception => logger.info("Error while sending notification ", e) + } ResponseHandler.OK.put("node_id", identifier).put("identifier", identifier) }) }).flatMap(f => f) } + def adminRead(request: Request): Future[Response] = { + val responseSchemaName: String = request.getContext.getOrDefault(ContentConstants.RESPONSE_SCHEMA_NAME, "").asInstanceOf[String] + val fields: util.List[String] = JavaConverters.seqAsJavaListConverter(request.get("fields").asInstanceOf[String].split(",").filter(field => StringUtils.isNotBlank(field) && !StringUtils.equalsIgnoreCase(field, "null"))).asJava + request.getRequest.put("fields", fields) + DataNode.read(request).map(node => { + val metadata: util.Map[String, AnyRef] = NodeUtil.serialize(node, fields, node.getObjectType.toLowerCase.replace("image", ""), request.getContext.get("version").asInstanceOf[String]) + metadata.put("identifier", node.getIdentifier.replace(".img", "")) + val response: Response = ResponseHandler.OK + if (responseSchemaName.isEmpty) { + response.put("content", metadata) + } else { + response.put(responseSchemaName, metadata) + } + response + }) + } + } diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventActor.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventActor.scala index cd42072d8..040d45f57 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventActor.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/actors/EventActor.scala @@ -1,6 +1,9 @@ package org.sunbird.content.actors import org.apache.commons.lang.StringUtils +import org.slf4j.{Logger, LoggerFactory} +import org.sunbird.cache.impl.RedisCache +import org.sunbird.common.Platform import org.sunbird.cloudstore.StorageService import org.sunbird.common.dto.{Request, Response, ResponseHandler} import org.sunbird.common.exception.{ClientException, ResponseCode} @@ -8,14 +11,20 @@ import org.sunbird.content.util.ContentConstants import org.sunbird.graph.OntologyEngineContext import org.sunbird.graph.dac.model.{Node, Relation} import org.sunbird.graph.nodes.DataNode +import org.sunbird.telemetry.logger.TelemetryManager +import org.sunbird.telemetry.util.LogTelemetryEventUtil +import org.sunbird.util.RequestUtil import java.util import javax.inject.Inject import scala.collection.JavaConverters.asScalaBufferConverter import scala.concurrent.Future +import scala.collection.JavaConversions._ +import scala.collection.JavaConverters +import scala.collection.JavaConverters._ class EventActor @Inject()(implicit oec: OntologyEngineContext, ss: StorageService) extends ContentActor { - + private val logger: Logger = LoggerFactory.getLogger("EventActor") override def onReceive(request: Request): Future[Response] = 
{ request.getOperation match { case "createContent" => create(request) @@ -24,27 +33,56 @@ class EventActor @Inject()(implicit oec: OntologyEngineContext, ss: StorageServi case "retireContent" => retire(request) case "discardContent" => discard(request) case "publishContent" => publish(request) + case "rejectEvent" => rejectEvent(request) + case "systemUpdate" => systemUpdate(request) case _ => ERROR(request.getOperation) } } override def update(request: Request): Future[Response] = { - verifyStandaloneEventAndApply(super.update, request, Some(node => { - if (!"Draft".equalsIgnoreCase(node.getMetadata.getOrDefault("status", "").toString)) { - throw new ClientException(ContentConstants.ERR_CONTENT_NOT_DRAFT, "Update not allowed! Event status isn't draft") - } - })) + populateDefaultersForUpdation(request) + val versionKey = request.getRequest.getOrDefault("versionKey", "").asInstanceOf[String] + if (StringUtils.isBlank(versionKey)) { + throw new ClientException("ERR_INVALID_REQUEST", "Please Provide Version Key!") + } + RequestUtil.restrictProperties(request) + val reviewStatus: String = request.getRequest.getOrDefault("reviewStatus", "").asInstanceOf[String] + if (reviewStatus == null || reviewStatus.isEmpty) { + request.getRequest.put("cqfVersion", System.currentTimeMillis().toString) + } + DataNode.update(request, dataModifier).map(node => { + val identifier: String = node.getIdentifier.replace(".img", "") + ResponseHandler.OK.put("node_id", identifier) + .put("identifier", identifier) + .put("versionKey", node.getMetadata.get("versionKey")) + }) } def publish(request: Request): Future[Response] = { - verifyStandaloneEventAndApply(super.update, request, Some(node => { - if (!"Draft".equalsIgnoreCase(node.getMetadata.getOrDefault("status", "").toString)) { - throw new ClientException(ContentConstants.ERR_CONTENT_NOT_DRAFT, "Publish not allowed! 
Event status isn't draft") + TelemetryManager.log("EventActor::publish Identifier: " + request.getRequest.getOrDefault("identifier", "")) + val identifier = request.get("identifier").asInstanceOf[String] + val updatedIdentifier = if (!identifier.endsWith(".img")) s"$identifier.img" else identifier + request.put("identifier", updatedIdentifier) + // Check if the node exists + DataNode.read(request).flatMap { node => + // If the node exists, proceed with update and delete + DataNode.updatev2(request, flag = true).flatMap { _ => + DataNode.delete(request) + try { + RedisCache.delete(identifier) + } catch { + case e: Exception => + logger.error(s"Error deleting Redis cache entry for identifier: $identifier", e) + } + request.put("identifier", updatedIdentifier.replace(".img", "")) + verifyStandaloneEventAndApply(super.update, request, true) } - val versionKey = node.getMetadata.getOrDefault("versionKey", "").toString - if (StringUtils.isNotBlank(versionKey)) - request.put("versionKey", versionKey) - })) + }.recoverWith { + case ex: Exception => + // If the node does not exist, directly call verifyStandaloneEventAndApply + request.put("identifier", updatedIdentifier.replace(".img", "")) + verifyStandaloneEventAndApply(super.update, request, true) + } } override def discard(request: Request): Future[Response] = { @@ -55,7 +93,7 @@ class EventActor @Inject()(implicit oec: OntologyEngineContext, ss: StorageServi verifyStandaloneEventAndApply(super.retire, request) } - private def verifyStandaloneEventAndApply(f: Request => Future[Response], request: Request, dataUpdater: Option[Node => Unit] = None): Future[Response] = { + private def verifyStandaloneEventAndApply(f: Request => Future[Response], request: Request, isPublish: Boolean = false, dataUpdater: Option[Node => Unit] = None): Future[Response] = { DataNode.read(request).flatMap(node => { val inRelations = if (node.getInRelations == null) new util.ArrayList[Relation]() else node.getInRelations; val hasEventSetParent = inRelations.asScala.exists(rel => "EventSet".equalsIgnoreCase(rel.getStartNodeObjectType)) @@ -65,18 +103,109 @@ class EventActor @Inject()(implicit oec: OntologyEngineContext, ss: StorageServi if (dataUpdater.isDefined) { dataUpdater.get.apply(node) } - f.apply(request) + f.apply(request).flatMap(response => { + // Check if the response is OK + if (response.getResponseCode == ResponseCode.OK) { + if (isPublish) { + TelemetryManager.log("EventActor::verifyStandaloneEventAndApply publish request for Identifier: " + request.getRequest.getOrDefault("identifier", "")) + pushInstructionEvent(node.getIdentifier, node) + } else { + TelemetryManager.log("EventActor::verifyStandaloneEventAndApply Identifier: " + request.getRequest.getOrDefault("identifier", "")) + } + Future.successful(response) + } else { + // Return the response if it's not OK as it is + Future.successful(response) + } + }) } }) } override def dataModifier(node: Node): Node = { + TelemetryManager.log("EventActor::dataModifier Identifier: " + node.getIdentifier) if (node.getMetadata.containsKey("trackable") && node.getMetadata.getOrDefault("trackable", new java.util.HashMap[String, AnyRef]).asInstanceOf[java.util.Map[String, AnyRef]].containsKey("enabled") && "Yes".equalsIgnoreCase(node.getMetadata.getOrDefault("trackable", new java.util.HashMap[String, AnyRef]).asInstanceOf[java.util.Map[String, AnyRef]].getOrDefault("enabled", "").asInstanceOf[String])) { node.getMetadata.put("contentType", "Event") + node.getMetadata.put("objectType", "Event") } node } + 
@throws[Exception]
+  def pushInstructionEvent(identifier: String, node: Node)(implicit oec: OntologyEngineContext): Unit = {
+    val (actor, context, objData, eData) = generateInstructionEventMetadata(identifier.replace(".img", ""), node)
+    val beJobRequestEvent: String = LogTelemetryEventUtil.logInstructionEvent(actor.asJava, context.asJava, objData.asJava, eData)
+    val topic: String = Platform.getString("kafka.topics.event.publish", "dev.publish.job.request")
+    if (StringUtils.isBlank(beJobRequestEvent)) throw new ClientException("BE_JOB_REQUEST_EXCEPTION", "Event is not generated properly.")
+    oec.kafkaClient.send(beJobRequestEvent, topic)
+  }
+
+  def generateInstructionEventMetadata(identifier: String, node: Node): (Map[String, AnyRef], Map[String, AnyRef], Map[String, AnyRef], util.Map[String, AnyRef]) = {
+    val metadata: util.Map[String, AnyRef] = node.getMetadata
+    val publishType = if (StringUtils.equalsIgnoreCase(metadata.getOrDefault("status", "").asInstanceOf[String], "Unlisted")) "unlisted" else "public"
+    val eventMetadata = Map("identifier" -> identifier, "mimeType" -> metadata.getOrDefault("mimeType", ""), "objectType" -> node.getObjectType.replace("Image", ""), "pkgVersion" -> metadata.getOrDefault("pkgVersion", 0.asInstanceOf[AnyRef]), "lastPublishedBy" -> metadata.getOrDefault("lastPublishedBy", ""))
+    val actor = Map("id" -> s"${node.getObjectType.toLowerCase().replace("image", "")}-publish", "type" -> "System".asInstanceOf[AnyRef])
+    val context = Map("channel" -> metadata.getOrDefault("channel", ""), "pdata" -> Map("id" -> "org.sunbird.platform", "ver" -> "1.0").asJava, "env" -> Platform.getString("cloud_storage.env", "dev"))
+    val objData = Map("id" -> identifier, "ver" -> metadata.getOrDefault("versionKey", ""))
+    val eData: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] {{
+      put("action", "publish")
+      put("publish_type", publishType)
+      put("metadata", eventMetadata.asJava)
+    }}
+    (actor, context, objData, eData)
+  }
+
+  def rejectEvent(request: Request): Future[Response] = {
+    RequestUtil.validateRequest(request)
+    DataNode.read(request).map(node => {
+      val status = node.getMetadata.get("status").asInstanceOf[String]
+      if (StringUtils.isBlank(status)) {
+        throw new ClientException("ERR_METADATA_ISSUE", "Event metadata error, status is blank for identifier:" + node.getIdentifier)
+      }
+      if (StringUtils.equalsIgnoreCase("sentToPublish", status)) {
+        request.getRequest.put("status", "Rejected")
+        request.getRequest.put("prevStatus", "sentToPublish")
+      } else {
+        throw new ClientException("ERR_INVALID_REQUEST", "Content not in Review status.")
+      }
+      request.getRequest.put("versionKey", node.getMetadata.get("versionKey"))
+      request.putIn("publishChecklist", null).putIn("publishComment", null)
+      RequestUtil.restrictProperties(request)
+      DataNode.update(request).map(node => {
+        val identifier: String = node.getIdentifier.replace(".img", "")
+        ResponseHandler.OK.put("node_id", identifier).put("identifier", identifier)
+      })
+    }).flatMap(f => f)
+  }
+
+  override def systemUpdate(request: Request): Future[Response] = {
+    RedisCache.delete(request.get("identifier").asInstanceOf[String])
+    val identifier = request.get("identifier").asInstanceOf[String]
+    val updatedIdentifier = if (!identifier.endsWith(".img")) s"$identifier.img" else identifier
+    DataNode.read(request).flatMap { node =>
+      // Extract attributes to be updated from the request body
+      val attributesToUpdate = request.getRequest.asInstanceOf[java.util.Map[String,
AnyRef]] + + // Append attributes from the request to the node's metadata + attributesToUpdate.forEach(new java.util.function.BiConsumer[String, AnyRef] { + override def accept(key: String, value: AnyRef): Unit = { + node.getMetadata.put(key, value) + } + }) + // Save the updated node + DataNode.updatev2(request, _ => node, flag = false).recover { + case ex: Exception => + TelemetryManager.error("Error occurred during updatev2 operation", ex) + ResponseHandler.ERROR(ResponseCode.SERVER_ERROR, "ERR_UPDATE_FAILED", ex.getMessage) + }.map(response => { + if (response.getResponseCode == ResponseCode.OK) { + ResponseHandler.OK + } else { + response + } + }) + } + } } \ No newline at end of file diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/util/CopyManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/util/CopyManager.scala index cbf52207d..15b3b266f 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/util/CopyManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/util/CopyManager.scala @@ -38,7 +38,7 @@ object CopyManager { private val originMetadataKeys: util.List[String] = Platform.getStringList("content.copy.origin_data", new util.ArrayList[String]()) private val internalHierarchyProps = List("identifier", "parent", "index", "depth") private val restrictedMimeTypesForUpload = List("application/vnd.ekstep.ecml-archive","application/vnd.ekstep.content-collection") - + private val copyArtifactUrl = Platform.config.getBoolean("content.copy.is_copy_artifacturl") private var copySchemeMap: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef]() def copy(request: Request)(implicit ec: ExecutionContext, oec: OntologyEngineContext, ss: StorageService): Future[Response] = { @@ -73,7 +73,11 @@ object CopyManager { val copyCreateReq: Future[Request] = getCopyRequest(node, request) copyCreateReq.map(req => { DataNode.create(req).map(copiedNode => { - artifactUpload(node, copiedNode, request) + if(copyArtifactUrl){ + artifactUpload(node, copiedNode, request) + }else{ + Future(copiedNode) + } }).flatMap(f => f) }).flatMap(f => f) } diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/util/DiscardManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/util/DiscardManager.scala index 6a2aa5ed6..13f17d240 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/util/DiscardManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/util/DiscardManager.scala @@ -48,8 +48,7 @@ object DiscardManager { } def validateRequest(request: Request): Unit = { - if (StringUtils.isBlank(request.getRequest.getOrDefault(ContentConstants.IDENTIFIER, "").asInstanceOf[String]) - || StringUtils.endsWith(request.getRequest.getOrDefault(ContentConstants.IDENTIFIER, "").asInstanceOf[String], ContentConstants.IMAGE_SUFFIX)) + if (StringUtils.isBlank(request.getRequest.getOrDefault(ContentConstants.IDENTIFIER, "").asInstanceOf[String])) throw new ClientException(ContentConstants.ERR_INVALID_CONTENT_ID, "Please provide valid content identifier") } diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/util/NotificationManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/util/NotificationManager.scala new file mode 100644 index 000000000..e59df56ad --- /dev/null +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/util/NotificationManager.scala @@ -0,0 +1,38 @@ +package 
org.sunbird.content.util + +import com.mashape.unirest.http.Unirest +import org.slf4j.{Logger, LoggerFactory} +import org.sunbird.common.{JsonUtils, Platform} +import org.sunbird.util.HTTPResponse +import scala.collection.JavaConverters._ + +object NotificationManager { + + private val logger: Logger = LoggerFactory.getLogger("NotificationManager") + + def sendNotification(subCategory: String, subType: String, userIds: List[String], title: String, data: collection.Map[String, Any]): Unit = { + + logger.info("Notification construction started") + + val bodyMap = Map( + "subCategory" -> subCategory, + "subType" -> subType, + "userIds" -> userIds.asJava, + "message" -> Map("placeholders" -> Map("title" -> title).asJava, "data" -> data.asJava).asJava + ).asJava + + val body = JsonUtils.serialize(bodyMap) + + val url = Platform.getString("notification.api.url", "http://cb-notification-wrapper-service:8081/notifications/create") + logger.info("Started sending notification with body {}", body) + val response = Unirest.post(url) + .header("Content-Type", "application/json") + .body(body) + .asString() + + logger.info("Successfully sent notification status: {}, body: {}", response.getStatus, response.getBody) + + HTTPResponse(response.getStatus, response.getBody) + } + +} \ No newline at end of file diff --git a/content-api/content-actors/src/main/scala/org/sunbird/content/util/RetireManager.scala b/content-api/content-actors/src/main/scala/org/sunbird/content/util/RetireManager.scala index cbd8e4eba..2be7ca06c 100644 --- a/content-api/content-actors/src/main/scala/org/sunbird/content/util/RetireManager.scala +++ b/content-api/content-actors/src/main/scala/org/sunbird/content/util/RetireManager.scala @@ -50,7 +50,7 @@ object RetireManager { private def validateRequest(request: Request) = { val contentId: String = request.get(ContentConstants.IDENTIFIER).asInstanceOf[String] - if (StringUtils.isBlank(contentId) || StringUtils.endsWithIgnoreCase(contentId, HierarchyConstants.IMAGE_SUFFIX)) + if (StringUtils.isBlank(contentId)) throw new ClientException(ContentConstants.ERR_INVALID_CONTENT_ID, "Please Provide Valid Content Identifier.") } diff --git a/content-api/content-service/app/controllers/BaseController.scala b/content-api/content-service/app/controllers/BaseController.scala index 4fc36e06e..f915e9240 100644 --- a/content-api/content-service/app/controllers/BaseController.scala +++ b/content-api/content-service/app/controllers/BaseController.scala @@ -26,6 +26,7 @@ abstract class BaseController(protected val cc: ControllerComponents)(implicit e new util.HashMap[String, AnyRef]()).asInstanceOf[java.util.Map[String, AnyRef]] val mimeTypesToCheck = List("application/vnd.ekstep.h5p-archive", "application/vnd.ekstep.html-archive", "application/vnd.android.package-archive", "video/webm", "video/x-youtube", "video/mp4") + val actorTimeout: Long = Platform.getLong("actor.timeoutMillisec", 120000L) def requestBody()(implicit request: Request[AnyContent]) = { val body = request.body.asJson.getOrElse("{}").toString @@ -83,7 +84,7 @@ abstract class BaseController(protected val cc: ControllerComponents)(implicit e } def getResult(apiId: String, actor: ActorRef, request: org.sunbird.common.dto.Request, categoryMapping: Boolean = false, version: String = "3.0") : Future[Result] = { - val future = Patterns.ask(actor, request, 30000) recoverWith {case e: Exception => Future(ResponseHandler.getErrorResponse(e))} + val future = Patterns.ask(actor, request, actorTimeout) recoverWith {case e: Exception => 
Future(ResponseHandler.getErrorResponse(e))}
     future.map(f => {
       val result: Response = f.asInstanceOf[Response]
       result.setId(apiId)
@@ -208,4 +209,19 @@ abstract class BaseController(protected val cc: ControllerComponents)(implicit e
     Future(BadRequest(JavaJsonUtils.serialize(result)).as("application/json"))
   }
+  def commonReadHeaders(ignoreHeaders: Option[List[String]] = Option(List()))(implicit request: Request[AnyContent]): java.util.Map[String, Object] = {
+    val customHeaders = Map("x-authenticated-user-orgid" -> "x-user-channel-id", "x-channel-id" -> "channel", "X-Consumer-ID" -> "consumerId", "X-App-Id" -> "appId").filterKeys(key => !ignoreHeaders.getOrElse(List()).contains(key))
+    customHeaders.map(ch => {
+      val value = request.headers.get(ch._1)
+      if (value.isDefined) {
+        collection.mutable.HashMap[String, Object](ch._2 -> value.get).asJava
+      } else {
+        collection.mutable.HashMap[String, Object]().asJava
+      }
+    }).reduce((a, b) => {
+      a.putAll(b)
+      a
+    })
+  }
+
 }
diff --git a/content-api/content-service/app/controllers/v3/ContentController.scala b/content-api/content-service/app/controllers/v3/ContentController.scala
index 05c5b2470..a5c73c51a 100644
--- a/content-api/content-service/app/controllers/v3/ContentController.scala
+++ b/content-api/content-service/app/controllers/v3/ContentController.scala
@@ -43,7 +43,7 @@ class ContentController @Inject()(@Named(ActorNames.CONTENT_ACTOR) contentActor:
    * @return
    */
   def read(identifier: String, mode: Option[String], fields: Option[String]) = Action.async { implicit request =>
-    val headers = commonHeaders()
+    val headers = commonReadHeaders()
     val content = new java.util.HashMap().asInstanceOf[java.util.Map[String, Object]]
     content.putAll(headers)
     content.putAll(Map("identifier" -> identifier, "mode" -> mode.getOrElse("read"), "fields" -> fields.getOrElse("")).asJava)
@@ -94,7 +94,7 @@ class ContentController @Inject()(@Named(ActorNames.CONTENT_ACTOR) contentActor:
   }
   def getHierarchy(identifier: String, mode: Option[String]) = Action.async { implicit request =>
-    val headers = commonHeaders()
+    val headers = commonReadHeaders()
     val content = new java.util.HashMap().asInstanceOf[java.util.Map[String, Object]]
     content.putAll(headers)
     content.putAll(Map("rootId" -> identifier, "mode" -> mode.getOrElse("")).asJava)
diff --git a/content-api/content-service/app/controllers/v4/ContentController.scala b/content-api/content-service/app/controllers/v4/ContentController.scala
index ddf2dc45c..65cb1b6c3 100644
--- a/content-api/content-service/app/controllers/v4/ContentController.scala
+++ b/content-api/content-service/app/controllers/v4/ContentController.scala
@@ -75,6 +75,7 @@ class ContentController @Inject()(@Named(ActorNames.CONTENT_ACTOR) contentActor:
     val contentRequest = getRequest(content, headers, "updateContent")
     setRequestContext(contentRequest, version, objectType, schemaName)
     contentRequest.getContext.put("identifier", identifier);
+    contentRequest.getContext.put("sendNotification", Boolean.box(true))
     getResult(ApiId.UPDATE_CONTENT, contentActor, contentRequest, version = apiVersion)
   }
@@ -208,4 +209,14 @@ class ContentController @Inject()(@Named(ActorNames.CONTENT_ACTOR) contentActor:
     getResult(ApiId.REJECT_CONTENT, contentActor, contentRequest, version = apiVersion)
   }
+  def adminRead(identifier: String, mode: Option[String], fields: Option[String]) = Action.async { implicit request =>
+    val headers = commonHeaders()
+    val content = new java.util.HashMap().asInstanceOf[java.util.Map[String, Object]]
+
content.putAll(headers) + content.putAll(Map("identifier" -> identifier, "mode" -> mode.getOrElse("read"), "fields" -> fields.getOrElse("")).asJava) + val readRequest = getRequest(content, headers, "adminReadContent") + setRequestContext(readRequest, version, objectType, schemaName) + getResult(ApiId.READ_PRIVATE_CONTENT, contentActor, readRequest, version = apiVersion) + } + } diff --git a/content-api/content-service/app/controllers/v4/EventController.scala b/content-api/content-service/app/controllers/v4/EventController.scala index 3b609d32b..93d215cd2 100644 --- a/content-api/content-service/app/controllers/v4/EventController.scala +++ b/content-api/content-service/app/controllers/v4/EventController.scala @@ -44,20 +44,18 @@ class EventController @Inject()(@Named(ActorNames.EVENT_ACTOR) eventActor: Actor val headers = commonHeaders() val body = requestBody() val content = body.getOrDefault(schemaName, new java.util.HashMap()).asInstanceOf[java.util.Map[String, Object]]; - if (content.containsKey("status")) { - getErrorResponse(ApiId.UPDATE_EVENT, apiVersion, "VALIDATION_ERROR", "status update is restricted, use status APIs.") - } else { - content.putAll(headers) - val contentRequest = getRequest(content, headers, "updateContent") - setRequestContext(contentRequest, version, objectType, schemaName) - contentRequest.getContext.put("identifier", identifier); - getResult(ApiId.UPDATE_EVENT, eventActor, contentRequest, version = apiVersion) - } + content.putAll(headers) + val contentRequest = getRequest(content, headers, "updateContent") + setRequestContext(contentRequest, version, objectType, schemaName) + contentRequest.getContext.put("identifier", identifier); + getResult(ApiId.UPDATE_EVENT, eventActor, contentRequest, version = apiVersion) + } def publish(identifier: String): Action[AnyContent] = Action.async { implicit request => val headers = commonHeaders() - val content = new java.util.HashMap[String, Object]() + val body = requestBody() + val content = body.getOrDefault(schemaName, new java.util.HashMap()).asInstanceOf[java.util.Map[String, Object]]; content.put("status", "Live") content.put("identifier", identifier) content.putAll(headers) @@ -67,4 +65,38 @@ class EventController @Inject()(@Named(ActorNames.EVENT_ACTOR) eventActor: Actor getResult(ApiId.PUBLISH_EVENT, eventActor, contentRequest, version = apiVersion) } + override def retire(identifier: String) = Action.async { implicit request => + val headers = commonHeaders() + val body = requestBody() + val content = body.getOrDefault(schemaName, new java.util.HashMap()).asInstanceOf[java.util.Map[String, Object]] + content.put("identifier", identifier) + content.putAll(headers) + val contentRequest = getRequest(content, headers, "retireContent") + setRequestContext(contentRequest, version, objectType, schemaName) + getResult(ApiId.RETIRE_CONTENT, eventActor, contentRequest, version = apiVersion) + } + + override def reviewReject(identifier: String) = Action.async { implicit request => + val headers = commonHeaders() + val body = requestBody() + val content = body.getOrDefault(schemaName, new java.util.HashMap()).asInstanceOf[java.util.Map[String, Object]]; + content.putAll(headers) + content.putAll(Map("identifier" -> identifier).asJava) + val contentRequest = getRequest(content, headers, "rejectEvent") + contentRequest.put("mode", "edit") + setRequestContext(contentRequest, version, objectType, schemaName) + contentRequest.getContext.put("identifier", identifier); + getResult(ApiId.REJECT_EVENT, eventActor, contentRequest, 
version = apiVersion) + } + + override def systemUpdate(identifier: String) = Action.async { implicit request => + val headers = commonHeaders() + val body = requestBody() + val content = body.getOrDefault(schemaName, new java.util.HashMap()).asInstanceOf[java.util.Map[String, Object]]; + content.putAll(headers) + val contentRequest = getRequest(content, headers, "systemUpdate") + setRequestContext(contentRequest, version, objectType, schemaName) + contentRequest.getContext.put("identifier", identifier); + getResult(ApiId.SYSTEM_UPDATE_CONTENT, eventActor, contentRequest, version = apiVersion) + } } \ No newline at end of file diff --git a/content-api/content-service/app/utils/ApiId.scala b/content-api/content-service/app/utils/ApiId.scala index 7aa95ac57..035ffc7cf 100644 --- a/content-api/content-service/app/utils/ApiId.scala +++ b/content-api/content-service/app/utils/ApiId.scala @@ -100,4 +100,5 @@ object ApiId { val IMPORT_CSV = "api.collection.import" val EXPORT_CSV = "api.collection.export" + val REJECT_EVENT = "api.event.review.reject" } diff --git a/content-api/content-service/conf/application.conf b/content-api/content-service/conf/application.conf index 33ac60598..2cfa3342c 100644 --- a/content-api/content-service/conf/application.conf +++ b/content-api/content-service/conf/application.conf @@ -588,6 +588,7 @@ collection.image.migration.enabled=true cloud_storage.upload.url.ttl=600 composite.search.url="https://dev.sunbirded.org/action/composite/v3/search" +notification.api.url="http://cb-notification-wrapper-service:8081/notifications/create" # Enable Suggested Framework in Get Channel API. channel.fetch.suggested_frameworks=true @@ -736,3 +737,14 @@ collection { } } } + +#Index file validation +indexHtmlValidation.env=false +# Resource change for root Hierarchy level +hierarchyUpdate.allow.resource.at.root.level=false + +#timeout +actor.timeoutMillisec = 120000 + +# Artifact Url allowed for copy resource +content.copy.is_copy_artifacturl= true \ No newline at end of file diff --git a/content-api/content-service/conf/routes b/content-api/content-service/conf/routes index fc4c8ad83..cce91ce12 100644 --- a/content-api/content-service/conf/routes +++ b/content-api/content-service/conf/routes @@ -87,6 +87,7 @@ POST /content/v4/create controllers.v4.ContentControl PATCH /content/v4/update/:identifier controllers.v4.ContentController.update(identifier:String) GET /content/v4/read/:identifier controllers.v4.ContentController.read(identifier:String, mode:Option[String], fields:Option[String]) GET /content/v4/private/read/:identifier controllers.v4.ContentController.privateRead(identifier:String, mode:Option[String], fields:Option[String]) +GET /content/v4/admin/read/:identifier controllers.v4.ContentController.adminRead(identifier:String, mode:Option[String], fields:Option[String]) POST /content/v4/upload/url/:identifier controllers.v4.ContentController.uploadPreSigned(identifier:String, type: Option[String]) POST /content/v4/upload/:identifier controllers.v4.ContentController.upload(identifier:String, fileFormat: Option[String], validation: Option[String]) POST /content/v4/copy/:identifier controllers.v4.ContentController.copy(identifier:String, mode:Option[String], type:String ?= "deep") @@ -112,6 +113,8 @@ POST /event/v4/publish/:identifier controllers.v4.EventControll GET /event/v4/read/:identifier controllers.v4.EventController.read(identifier:String, mode:Option[String], fields:Option[String]) DELETE /event/v4/discard/:identifier 
controllers.v4.EventController.discard(identifier:String) DELETE /private/event/v4/retire/:identifier controllers.v4.EventController.retire(identifier:String) +POST /event/v4/reject/:identifier controllers.v4.EventController.reviewReject(identifier:String) +PATCH /event/v4/system/update/:identifier controllers.v4.EventController.systemUpdate(identifier:String) # EventSet v4 Api's POST /eventset/v4/create controllers.v4.EventSetController.create diff --git a/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/HierarchyManager.scala b/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/HierarchyManager.scala index 186232d39..966e2a470 100644 --- a/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/HierarchyManager.scala +++ b/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/HierarchyManager.scala @@ -7,7 +7,7 @@ import org.apache.commons.lang3.StringUtils import org.sunbird.cache.impl.RedisCache import org.sunbird.common.dto.{Request, Response, ResponseHandler, ResponseParams} import org.sunbird.common.exception.{ClientException, ErrorCodes, ResourceNotFoundException, ResponseCode, ServerException} -import org.sunbird.common.{JsonUtils, Platform} +import org.sunbird.common.{JsonUtils, JWTUtil, Platform} import org.sunbird.graph.dac.model.Node import org.sunbird.graph.nodes.DataNode import org.sunbird.graph.utils.{NodeUtil, ScalaJsonUtils} @@ -132,7 +132,9 @@ object HierarchyManager { } val bookmarkId = request.get("bookmarkId").asInstanceOf[String] var metadata: util.Map[String, AnyRef] = NodeUtil.serialize(rootNode, new util.ArrayList[String](), request.getContext.get("schemaName").asInstanceOf[String], request.getContext.get("version").asInstanceOf[String]) - + if (!validateContentSecurity(request, metadata)) { + Future(ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name(), "User can't read content with Id: " + request.get("rootId"))) + } fetchRelationalMetadata(request, rootNode.getIdentifier).map(collRelationalMetadata => { val hierarchy = fetchHierarchy(request, rootNode.getIdentifier) @@ -211,15 +213,24 @@ object HierarchyManager { if (!result.isEmpty) { val bookmarkId = request.get("bookmarkId").asInstanceOf[String] val rootHierarchy = result.get("content").asInstanceOf[util.Map[String, AnyRef]] - if (StringUtils.isEmpty(bookmarkId)) { - ResponseHandler.OK.put("content", rootHierarchy) + if (!validateContentSecurity(request, rootHierarchy)) { + ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name(), "User can't read content with Id: " + request.get("rootId")) } else { - val children = rootHierarchy.getOrElse("children", new util.ArrayList[util.Map[String, AnyRef]]()).asInstanceOf[util.List[util.Map[String, AnyRef]]] - val bookmarkHierarchy = filterBookmarkHierarchy(children, bookmarkId) - if (MapUtils.isEmpty(bookmarkHierarchy)) { - ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name(), "bookmarkId " + bookmarkId + " does not exist") + if (isSecureContent(rootHierarchy)) { + val csToken = generateCSToken(rootHierarchy.get("childNodes").asInstanceOf[util.List[String]]) + rootHierarchy.put("cstoken", csToken) + } + + if (StringUtils.isEmpty(bookmarkId)) { + ResponseHandler.OK.put("content", rootHierarchy) } else { - ResponseHandler.OK.put("content", bookmarkHierarchy) + val children = rootHierarchy.getOrElse("children", new util.ArrayList[util.Map[String, AnyRef]]()).asInstanceOf[util.List[util.Map[String, 
AnyRef]]] + val bookmarkHierarchy = filterBookmarkHierarchy(children, bookmarkId) + if (MapUtils.isEmpty(bookmarkHierarchy)) { + ResponseHandler.ERROR(ResponseCode.RESOURCE_NOT_FOUND, ResponseCode.RESOURCE_NOT_FOUND.name(), "bookmarkId " + bookmarkId + " does not exist") + } else { + ResponseHandler.OK.put("content", bookmarkHierarchy) + } } } } else @@ -453,7 +464,7 @@ object HierarchyManager { responseFuture.map(response => { if (!ResponseHandler.checkError(response)) { val relationalMetadataString = response.getResult.toMap.getOrDefault("relational_metadata", "").asInstanceOf[String] - if (StringUtils.isNotEmpty(relationalMetadataString)) { + if (StringUtils.isNotEmpty(relationalMetadataString) && !relationalMetadataString.equalsIgnoreCase("null")) { Future(JsonUtils.deserialize(relationalMetadataString, classOf[java.util.Map[String, AnyRef]]).toMap) } else Future(Map[String, AnyRef]()) @@ -464,7 +475,7 @@ object HierarchyManager { responseFuture.map(response => { if (!ResponseHandler.checkError(response)) { val relationalMetadataString = response.getResult.toMap.getOrDefault("relational_metadata", "").asInstanceOf[String] - if (StringUtils.isNotEmpty(relationalMetadataString)) { + if (StringUtils.isNotEmpty(relationalMetadataString) && !relationalMetadataString.equalsIgnoreCase("null")) { Future(JsonUtils.deserialize(relationalMetadataString, classOf[java.util.Map[String, AnyRef]]).toMap) } else Future(Map[String, AnyRef]()) @@ -713,4 +724,40 @@ object HierarchyManager { if(configObjTypes.nonEmpty && !configObjTypes.contains(childNode.getOrDefault("objectType", "").asInstanceOf[String])) throw new ClientException("ERR_INVALID_CHILDREN", "Invalid Children objectType "+childNode.get("objectType")+" found for : "+childNode.get("identifier") + "| Please provide children having one of the objectType from "+ configObjTypes.asJava) } + + def isSecureContent (metadata: util.Map[String, AnyRef])(implicit ec: ExecutionContext): Boolean = { + var securityAttribute : util.Map[String, AnyRef] = metadata.getOrDefault("secureSettings", new util.HashMap[String, AnyRef]).asInstanceOf[util.Map[String, AnyRef]] + var isSecureContent = false + if (MapUtils.isNotEmpty(securityAttribute)) { + var orgList : util.ArrayList[String] = securityAttribute.getOrDefault("organisation", new util.ArrayList[String]).asInstanceOf[util.ArrayList[String]] + if (!CollectionUtils.isEmpty(orgList)) { + isSecureContent = true + } + } + isSecureContent + } + + def validateContentSecurity(request: Request, metadata: util.Map[String, AnyRef])(implicit ec: ExecutionContext): Boolean = { + var securityAttribute : util.Map[String, AnyRef] = metadata.getOrDefault("secureSettings", new util.HashMap[String, AnyRef]).asInstanceOf[util.Map[String, AnyRef]] + var isUserAllowedToRead = true + if (MapUtils.isNotEmpty(securityAttribute)) { + var orgList : util.ArrayList[String] = securityAttribute.getOrDefault("organisation", new util.ArrayList[String]).asInstanceOf[util.ArrayList[String]] + if (!CollectionUtils.isEmpty(orgList)) { + //Content should be read by unique org users only. 
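+ // For example (hypothetical values): with secureSettings = {"organisation": ["org-channel-1"]},
+ // a request whose x-user-channel-id is "org-channel-1" keeps isUserAllowedToRead = true,
+ // while any other or missing channel id drops it to false below.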
+ var userChannelId : String = request.getRequest.getOrDefault("x-user-channel-id", "").asInstanceOf[String] + if (!orgList.contains(userChannelId)) { + isUserAllowedToRead = false + } + } + } + isUserAllowedToRead + } + + def generateCSToken(children: util.List[String])(implicit ec: ExecutionContext): String = { + var csToken = ""; + var claimsMap : util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] + claimsMap.put("contentIdentifier", children) + csToken = JWTUtil.createHS256Token(claimsMap) + csToken + } } diff --git a/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/UpdateHierarchyManager.scala b/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/UpdateHierarchyManager.scala index 01c245c6a..fbb73934a 100644 --- a/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/UpdateHierarchyManager.scala +++ b/content-api/hierarchy-manager/src/main/scala/org/sunbird/managers/UpdateHierarchyManager.scala @@ -345,7 +345,7 @@ object UpdateHierarchyManager { if (MapUtils.isNotEmpty(childrenIdentifiersMap)) { val updatedNodeList = getTempNode(nodeList, rootId) :: List() updateHierarchyRelatedData(childrenIdentifiersMap.getOrElse(rootId, Map[String, Int]()), 1, - rootId, nodeList, childrenIdentifiersMap, updatedNodeList, request).map(finalEnrichedNodeList => { + rootId, nodeList, childrenIdentifiersMap, updatedNodeList, request, rootId).map(finalEnrichedNodeList => { TelemetryManager.info("Final enriched list size: " + finalEnrichedNodeList.size) val childNodeIds = finalEnrichedNodeList.map(node => node.getIdentifier.replaceAll(".img", "")).filterNot(id => StringUtils.containsIgnoreCase(rootId, id)).distinct TelemetryManager.info("Final enriched ids (childNodes): " + childNodeIds + " :: size: " + childNodeIds.size) @@ -368,7 +368,8 @@ object UpdateHierarchyManager { } @throws[Exception] - private def updateHierarchyRelatedData(childrenIds: Map[String, Int], depth: Int, parent: String, nodeList: List[Node], hierarchyStructure: Map[String, Map[String, Int]], enrichedNodeList: scala.collection.immutable.List[Node], request: Request)(implicit oec: OntologyEngineContext, ec: ExecutionContext): Future[List[Node]] = { + private def updateHierarchyRelatedData(childrenIds: Map[String, Int], depth: Int, parent: String, nodeList: List[Node], hierarchyStructure: Map[String, Map[String, Int]], enrichedNodeList: scala.collection.immutable.List[Node], request: Request, rootId: String)(implicit oec: OntologyEngineContext, ec: ExecutionContext): Future[List[Node]] = { + val rootResourceChange: Boolean = if (Platform.config.hasPath("hierarchyUpdate.allow.resource.at.root.level")) Platform.config.getBoolean("hierarchyUpdate.allow.resource.at.root.level") else false val futures = childrenIds.map(child => { val id = child._1 val index = child._2 + 1 @@ -378,13 +379,19 @@ object UpdateHierarchyManager { val nxtEnrichedNodeList = tempNode :: enrichedNodeList if (MapUtils.isNotEmpty(hierarchyStructure.getOrDefault(child._1, Map[String, Int]()))) updateHierarchyRelatedData(hierarchyStructure.getOrDefault(child._1, Map[String, Int]()), - tempNode.getMetadata.get(HierarchyConstants.DEPTH).asInstanceOf[Int] + 1, id, nodeList, hierarchyStructure, nxtEnrichedNodeList, request) + tempNode.getMetadata.get(HierarchyConstants.DEPTH).asInstanceOf[Int] + 1, id, nodeList, hierarchyStructure, nxtEnrichedNodeList, request, rootId) else Future(nxtEnrichedNodeList) } else { // TelemetryManager.info("Get ContentNode as TempNode is null for ID: " + id) getContentNode(id, 
HierarchyConstants.TAXONOMY_ID).map(node => { - val parentNode: Node = nodeList.find(p => p.getIdentifier.equals(parent)).orNull + val parentNode: Node = if (rootResourceChange && nodeList.find(p => p.getIdentifier.equals(parent)).orNull == null) { + if (nodeList.find(p => p.getIdentifier.equals(rootId)).orNull == null) + nodeList.find(p => p.getIdentifier.equals(rootId + ".img")).orNull + else + nodeList.find(p => p.getIdentifier.equals(rootId)).orNull + } else + nodeList.find(p => p.getIdentifier.equals(parent)).orNull val nxtEnrichedNodeList = if (null != parentNode) { TelemetryManager.info(s"ObjectType for $parent is ${parentNode.getObjectType}...") val parentMetadata: java.util.Map[String, AnyRef] = NodeUtil.serialize(parentNode, new java.util.ArrayList[String](), parentNode.getObjectType.toLowerCase, "1.0") @@ -401,7 +408,7 @@ object UpdateHierarchyManager { enrichedNodeList } if (MapUtils.isNotEmpty(hierarchyStructure.getOrDefault(id, Map[String, Int]()))) { - updateHierarchyRelatedData(hierarchyStructure.getOrDefault(id, Map[String, Int]()), node.getMetadata.get(HierarchyConstants.DEPTH).asInstanceOf[Int] + 1, id, nodeList, hierarchyStructure, nxtEnrichedNodeList, request) + updateHierarchyRelatedData(hierarchyStructure.getOrDefault(id, Map[String, Int]()), node.getMetadata.get(HierarchyConstants.DEPTH).asInstanceOf[Int] + 1, id, nodeList, hierarchyStructure, nxtEnrichedNodeList, request, rootId) } else Future(nxtEnrichedNodeList) }).flatMap(f => f) recoverWith { case e: CompletionException => throw e.getCause } diff --git a/ontology-engine/graph-dac-api/src/main/java/org/sunbird/graph/service/util/BaseQueryGenerationUtil.java b/ontology-engine/graph-dac-api/src/main/java/org/sunbird/graph/service/util/BaseQueryGenerationUtil.java index df732461d..57e538da8 100644 --- a/ontology-engine/graph-dac-api/src/main/java/org/sunbird/graph/service/util/BaseQueryGenerationUtil.java +++ b/ontology-engine/graph-dac-api/src/main/java/org/sunbird/graph/service/util/BaseQueryGenerationUtil.java @@ -172,6 +172,9 @@ protected static Map getSystemPropertyQueryMap(Node node, String if (StringUtils.isBlank(node.getIdentifier())) node.setIdentifier(Identifier.getIdentifier(node.getGraphId(), Identifier.getUniqueIdFromTimestamp())); + if (node.getMetadata().containsKey("secureSettings") && !node.getIdentifier().contains("_rc")) { + node.setIdentifier(node.getIdentifier() + "_rc"); + } // Adding 'IL_UNIQUE_ID' Property query.append( SystemProperties.IL_UNIQUE_ID.name() + ": { SP_" + SystemProperties.IL_UNIQUE_ID.name() + " }, "); diff --git a/ontology-engine/graph-engine_2.11/src/main/scala/org/sunbird/graph/nodes/DataNode.scala b/ontology-engine/graph-engine_2.11/src/main/scala/org/sunbird/graph/nodes/DataNode.scala index 1e7b83178..61ca93352 100644 --- a/ontology-engine/graph-engine_2.11/src/main/scala/org/sunbird/graph/nodes/DataNode.scala +++ b/ontology-engine/graph-engine_2.11/src/main/scala/org/sunbird/graph/nodes/DataNode.scala @@ -323,4 +323,45 @@ object DataNode { }) } + def delete(request: Request)(implicit oec: OntologyEngineContext, ec: ExecutionContext): Future[Response] = { + val identifier = request.get("identifier").asInstanceOf[String] + val updatedIdentifier = if (!identifier.endsWith(".img")) s"$identifier.img" else identifier + request.put("identifier", updatedIdentifier) + if (StringUtils.isBlank(identifier)) { + throw new ClientException("ERR_INVALID_REQUEST", "Identifier is required for deletion") + } + // Perform the deletion operation + 
oec.graphService.deleteNode(request.graphId, identifier, request).map { _ => + val response = new Response + response.put("message", "Node deleted successfully") + response + }.recover { + case ex: Exception => + throw new ClientException("ERR_NODE_DELETION_FAILED", s"Failed to delete node with identifier: $identifier", ex) + } + } + + def updatev2(request: Request, dataModifier: (Node) => Node = defaultDataModifier, flag: Boolean)(implicit oec: OntologyEngineContext, ec: ExecutionContext): Future[Response] = { + val identifier = request.get("identifier").asInstanceOf[String] + if (StringUtils.isBlank(identifier)) { + throw new ClientException("ERR_INVALID_REQUEST", "Identifier is required for update") + } + // Read the node to be updated + DataNode.read(request).flatMap { node => + val updatedNode = dataModifier(node) // Apply the data modifier to update the node + if(flag) { + updatedNode.setIdentifier(identifier.replace(".img","")) + } + oec.graphService.upsertNode(request.graphId, updatedNode, request).map { updatedNode => + val response = new Response + response.put("message", "Node updated successfully") + response.put("identifier", updatedNode.getIdentifier) + response + } + }.recover { + case ex: Exception => + throw new ClientException("ERR_NODE_UPDATE_FAILED", s"Failed to update node with identifier: $identifier", ex) + } + } + } diff --git a/ontology-engine/graph-engine_2.11/src/main/scala/org/sunbird/graph/schema/validator/VersioningNode.scala b/ontology-engine/graph-engine_2.11/src/main/scala/org/sunbird/graph/schema/validator/VersioningNode.scala index 36190dca2..f35f4e0ae 100644 --- a/ontology-engine/graph-engine_2.11/src/main/scala/org/sunbird/graph/schema/validator/VersioningNode.scala +++ b/ontology-engine/graph-engine_2.11/src/main/scala/org/sunbird/graph/schema/validator/VersioningNode.scala @@ -85,17 +85,22 @@ trait VersioningNode extends IDefinition { node.getMetadata.put("status", "Draft") node.getMetadata.put("prevStatus", status) node.getMetadata.put(AuditProperties.lastStatusChangedOn.name, DateUtils.formatCurrentDate()) - oec.graphService.addNode(node.getGraphId, node).map(imgNode => { - imgNode.getMetadata.put("isImageNodeCreated", "yes"); - copyExternalProps(identifier, node.getGraphId, imgNode.getObjectType.toLowerCase().replace("image", "")).map(response => { - if(!ResponseHandler.checkError(response)) { - if(null != response.getResult && !response.getResult.isEmpty) - imgNode.setExternalData(response.getResult) + oec.graphService.addNode(node.getGraphId, node).map { imgNode => + imgNode.getMetadata.put("isImageNodeCreated", "yes") + val category = node.getMetadata.get("category").asInstanceOf[String] + if (!"event".equalsIgnoreCase(category)) { + copyExternalProps(identifier, node.getGraphId, imgNode.getObjectType.toLowerCase().replace("image", "")).map { response => + if (!ResponseHandler.checkError(response)) { + if (null != response.getResult && !response.getResult.isEmpty) + imgNode.setExternalData(response.getResult) + } + imgNode } - imgNode - }) - }).flatMap(f=>f) - } else + } else { + Future.successful(imgNode) + } + }.flatMap(f => f) + }else throw e.getCause } } diff --git a/platform-core/platform-common/src/main/java/org/sunbird/common/Base64Util.java b/platform-core/platform-common/src/main/java/org/sunbird/common/Base64Util.java new file mode 100644 index 000000000..73fb8e50a --- /dev/null +++ b/platform-core/platform-common/src/main/java/org/sunbird/common/Base64Util.java @@ -0,0 +1,741 @@ +package org.sunbird.common; + +/* + * Copyright (C) 
2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import java.io.UnsupportedEncodingException; + +/** + * Utilities for encoding and decoding the Base64 representation of + * binary data. See RFCs 2045 and 3548. + */ +public class Base64Util { + /** + * Default values for encoder/decoder flags. + */ + public static final int DEFAULT = 0; + + /** + * Encoder flag bit to omit the padding '=' characters at the end + * of the output (if any). + */ + public static final int NO_PADDING = 1; + + /** + * Encoder flag bit to omit all line terminators (i.e., the output + * will be on one long line). + */ + public static final int NO_WRAP = 2; + + /** + * Encoder flag bit to indicate lines should be terminated with a + * CRLF pair instead of just an LF. Has no effect if {@code + * NO_WRAP} is specified as well. + */ + public static final int CRLF = 4; + + /** + * Encoder/decoder flag bit to indicate using the "URL and + * filename safe" variant of Base64 (see RFC 3548 section 4) where + * {@code -} and {@code _} are used in place of {@code +} and + * {@code /}. + */ + public static final int URL_SAFE = 8; + + /** + * Flag to pass to {Base64OutputStream} to indicate that it + * should not close the output stream it is wrapping when it + * itself is closed. + */ + public static final int NO_CLOSE = 16; + + // -------------------------------------------------------- + // shared code + // -------------------------------------------------------- + + private Base64Util() { + } // don't instantiate + + // -------------------------------------------------------- + // decoding + // -------------------------------------------------------- + + /** + * Decode the Base64-encoded data in input and return the data in + * a new byte array. + *
+ * <p>
The padding '=' characters at the end are considered optional, but + * if any are present, there must be the correct number of them. + * + * @param str the input String to decode, which is converted to + * bytes using the default charset + * @param flags controls certain features of the decoded output. + * Pass {@code DEFAULT} to decode standard Base64. + * @throws IllegalArgumentException if the input contains + * incorrect padding + */ + public static byte[] decode(String str, int flags) { + return decode(str.getBytes(), flags); + } + + /** + * Decode the Base64-encoded data in input and return the data in + * a new byte array. + *
+ * <p>
The padding '=' characters at the end are considered optional, but + * if any are present, there must be the correct number of them. + * + * @param input the input array to decode + * @param flags controls certain features of the decoded output. + * Pass {@code DEFAULT} to decode standard Base64. + * @throws IllegalArgumentException if the input contains + * incorrect padding + */ + public static byte[] decode(byte[] input, int flags) { + return decode(input, 0, input.length, flags); + } + + /** + * Decode the Base64-encoded data in input and return the data in + * a new byte array. + *
+ * <p>
The padding '=' characters at the end are considered optional, but + * if any are present, there must be the correct number of them. + * + * @param input the data to decode + * @param offset the position within the input array at which to start + * @param len the number of bytes of input to decode + * @param flags controls certain features of the decoded output. + * Pass {@code DEFAULT} to decode standard Base64. + * @throws IllegalArgumentException if the input contains + * incorrect padding + */ + public static byte[] decode(byte[] input, int offset, int len, int flags) { + // Allocate space for the most data the input could represent. + // (It could contain less if it contains whitespace, etc.) + Decoder decoder = new Decoder(flags, new byte[len * 3 / 4]); + + if (!decoder.process(input, offset, len, true)) { + throw new IllegalArgumentException("bad base-64"); + } + + // Maybe we got lucky and allocated exactly enough output space. + if (decoder.op == decoder.output.length) { + return decoder.output; + } + + // Need to shorten the array, so allocate a new one of the + // right size and copy. + byte[] temp = new byte[decoder.op]; + System.arraycopy(decoder.output, 0, temp, 0, decoder.op); + return temp; + } + + /** + * Base64-encode the given data and return a newly allocated + * String with the result. + * + * @param input the data to encode + * @param flags controls certain features of the encoded output. + * Passing {@code DEFAULT} results in output that + * adheres to RFC 2045. + */ + public static String encodeToString(byte[] input, int flags) { + try { + return new String(encode(input, flags), "US-ASCII"); + } catch (UnsupportedEncodingException e) { + // US-ASCII is guaranteed to be available. + throw new AssertionError(e); + } + } + + // -------------------------------------------------------- + // encoding + // -------------------------------------------------------- + + /** + * Base64-encode the given data and return a newly allocated + * String with the result. + * + * @param input the data to encode + * @param offset the position within the input array at which to + * start + * @param len the number of bytes of input to encode + * @param flags controls certain features of the encoded output. + * Passing {@code DEFAULT} results in output that + * adheres to RFC 2045. + */ + public static String encodeToString(byte[] input, int offset, int len, int flags) { + try { + return new String(encode(input, offset, len, flags), "US-ASCII"); + } catch (UnsupportedEncodingException e) { + // US-ASCII is guaranteed to be available. + throw new AssertionError(e); + } + } + + /** + * Base64-encode the given data and return a newly allocated + * byte[] with the result. + * + * @param input the data to encode + * @param flags controls certain features of the encoded output. + * Passing {@code DEFAULT} results in output that + * adheres to RFC 2045. + */ + public static byte[] encode(byte[] input, int flags) { + return encode(input, 0, input.length, flags); + } + + /** + * Base64-encode the given data and return a newly allocated + * byte[] with the result. + * + * @param input the data to encode + * @param offset the position within the input array at which to + * start + * @param len the number of bytes of input to encode + * @param flags controls certain features of the encoded output. + * Passing {@code DEFAULT} results in output that + * adheres to RFC 2045. 
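+ *
+ * <p>Worked example: encoding the five input bytes "ABCDE" with {@code DEFAULT}
+ * flags produces "QUJDREU=" followed by a trailing newline (8 data characters,
+ * then the line terminator), since padding and line wrapping are both enabled by default.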
+ */ + public static byte[] encode(byte[] input, int offset, int len, int flags) { + Encoder encoder = new Encoder(flags, null); + + // Compute the exact length of the array we will produce. + int output_len = len / 3 * 4; + + // Account for the tail of the data and the padding bytes, if any. + if (encoder.do_padding) { + if (len % 3 > 0) { + output_len += 4; + } + } else { + switch (len % 3) { + case 0: + break; + case 1: + output_len += 2; + break; + case 2: + output_len += 3; + break; + } + } + + // Account for the newlines, if any. + if (encoder.do_newline && len > 0) { + output_len += (((len - 1) / (3 * Encoder.LINE_GROUPS)) + 1) * + (encoder.do_cr ? 2 : 1); + } + + encoder.output = new byte[output_len]; + encoder.process(input, offset, len, true); + + assert encoder.op == output_len; + + return encoder.output; + } + + /* package */ static abstract class Coder { + public byte[] output; + public int op; + + /** + * Encode/decode another block of input data. this.output is + * provided by the caller, and must be big enough to hold all + * the coded data. On exit, this.opwill be set to the length + * of the coded data. + * + * @param finish true if this is the final call to process for + * this object. Will finalize the coder state and + * include any final bytes in the output. + * @return true if the input so far is good; false if some + * error has been detected in the input stream.. + */ + public abstract boolean process(byte[] input, int offset, int len, boolean finish); + + /** + * @return the maximum number of bytes a call to process() + * could produce for the given number of input bytes. This may + * be an overestimate. + */ + public abstract int maxOutputSize(int len); + } + + /* package */ static class Decoder extends Coder { + /** + * Lookup table for turning bytes into their position in the + * Base64 alphabet. + */ + private static final int DECODE[] = { + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63, + 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -2, -1, -1, + -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1, + -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, + 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + }; + + /** + * Decode lookup table for the "web safe" variant (RFC 3548 + * sec. 4) where - and _ replace + and /. 
+ */ + private static final int DECODE_WEBSAFE[] = { + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, + 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -2, -1, -1, + -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, 63, + -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, + 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + }; + + /** + * Non-data values in the DECODE arrays. + */ + private static final int SKIP = -1; + private static final int EQUALS = -2; + final private int[] alphabet; + /** + * States 0-3 are reading through the next input tuple. + * State 4 is having read one '=' and expecting exactly + * one more. + * State 5 is expecting no more data or padding characters + * in the input. + * State 6 is the error state; an error has been detected + * in the input and no future input can "fix" it. + */ + private int state; // state number (0 to 6) + private int value; + + public Decoder(int flags, byte[] output) { + this.output = output; + + alphabet = ((flags & URL_SAFE) == 0) ? DECODE : DECODE_WEBSAFE; + state = 0; + value = 0; + } + + /** + * @return an overestimate for the number of bytes {@code + * len} bytes could decode to. + */ + public int maxOutputSize(int len) { + return len * 3 / 4 + 10; + } + + /** + * Decode another block of input data. + * + * @return true if the state machine is still healthy. false if + * bad base-64 data has been detected in the input stream. + */ + public boolean process(byte[] input, int offset, int len, boolean finish) { + if (this.state == 6) return false; + + int p = offset; + len += offset; + + // Using local variables makes the decoder about 12% + // faster than if we manipulate the member variables in + // the loop. (Even alphabet makes a measurable + // difference, which is somewhat surprising to me since + // the member variable is final.) + int state = this.state; + int value = this.value; + int op = 0; + final byte[] output = this.output; + final int[] alphabet = this.alphabet; + + while (p < len) { + // Try the fast path: we're starting a new tuple and the + // next four bytes of the input stream are all data + // bytes. This corresponds to going through states + // 0-1-2-3-0. We expect to use this method for most of + // the data. + // + // If any of the next four bytes of input are non-data + // (whitespace, etc.), value will end up negative. (All + // the non-data values in decode are small negative + // numbers, so shifting any of them up and or'ing them + // together will result in a value with its top bit set.) + // + // You can remove this whole block and the output should + // be the same, just slower. 
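+ // Worked example of the fast path (illustrative): for the input "TWFu",
+ //   'T' -> 19, 'W' -> 22, 'F' -> 5, 'u' -> 46 in the standard alphabet, so
+ //   value = (19 << 18) | (22 << 12) | (5 << 6) | 46 = 0x4D616E,
+ //   and the three bytes written are 0x4D 0x61 0x6E, i.e. "Man".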
+ if (state == 0) { + while (p + 4 <= len && + (value = ((alphabet[input[p] & 0xff] << 18) | + (alphabet[input[p + 1] & 0xff] << 12) | + (alphabet[input[p + 2] & 0xff] << 6) | + (alphabet[input[p + 3] & 0xff]))) >= 0) { + output[op + 2] = (byte) value; + output[op + 1] = (byte) (value >> 8); + output[op] = (byte) (value >> 16); + op += 3; + p += 4; + } + if (p >= len) break; + } + + // The fast path isn't available -- either we've read a + // partial tuple, or the next four input bytes aren't all + // data, or whatever. Fall back to the slower state + // machine implementation. + + int d = alphabet[input[p++] & 0xff]; + + switch (state) { + case 0: + if (d >= 0) { + value = d; + ++state; + } else if (d != SKIP) { + this.state = 6; + return false; + } + break; + + case 1: + if (d >= 0) { + value = (value << 6) | d; + ++state; + } else if (d != SKIP) { + this.state = 6; + return false; + } + break; + + case 2: + if (d >= 0) { + value = (value << 6) | d; + ++state; + } else if (d == EQUALS) { + // Emit the last (partial) output tuple; + // expect exactly one more padding character. + output[op++] = (byte) (value >> 4); + state = 4; + } else if (d != SKIP) { + this.state = 6; + return false; + } + break; + + case 3: + if (d >= 0) { + // Emit the output triple and return to state 0. + value = (value << 6) | d; + output[op + 2] = (byte) value; + output[op + 1] = (byte) (value >> 8); + output[op] = (byte) (value >> 16); + op += 3; + state = 0; + } else if (d == EQUALS) { + // Emit the last (partial) output tuple; + // expect no further data or padding characters. + output[op + 1] = (byte) (value >> 2); + output[op] = (byte) (value >> 10); + op += 2; + state = 5; + } else if (d != SKIP) { + this.state = 6; + return false; + } + break; + + case 4: + if (d == EQUALS) { + ++state; + } else if (d != SKIP) { + this.state = 6; + return false; + } + break; + + case 5: + if (d != SKIP) { + this.state = 6; + return false; + } + break; + } + } + + if (!finish) { + // We're out of input, but a future call could provide + // more. + this.state = state; + this.value = value; + this.op = op; + return true; + } + + // Done reading input. Now figure out where we are left in + // the state machine and finish up. + + switch (state) { + case 0: + // Output length is a multiple of three. Fine. + break; + case 1: + // Read one extra input byte, which isn't enough to + // make another output byte. Illegal. + this.state = 6; + return false; + case 2: + // Read two extra input bytes, enough to emit 1 more + // output byte. Fine. + output[op++] = (byte) (value >> 4); + break; + case 3: + // Read three extra input bytes, enough to emit 2 more + // output bytes. Fine. + output[op++] = (byte) (value >> 10); + output[op++] = (byte) (value >> 2); + break; + case 4: + // Read one padding '=' when we expected 2. Illegal. + this.state = 6; + return false; + case 5: + // Read all the padding '='s we expected and no more. + // Fine. + break; + } + + this.state = state; + this.op = op; + return true; + } + } + + /* package */ static class Encoder extends Coder { + /** + * Emit a new line every this many output tuples. Corresponds to + * a 76-character line length (the maximum allowable according to + * RFC 2045). + */ + public static final int LINE_GROUPS = 19; + + /** + * Lookup table for turning Base64 alphabet positions (6 bits) + * into output bytes. 
+ */ + private static final byte ENCODE[] = { + 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', + 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', + 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', + 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/', + }; + + /** + * Lookup table for turning Base64 alphabet positions (6 bits) + * into output bytes. + */ + private static final byte ENCODE_WEBSAFE[] = { + 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', + 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', + 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', + 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-', '_', + }; + final public boolean do_padding; + final public boolean do_newline; + final public boolean do_cr; + final private byte[] tail; + final private byte[] alphabet; + /* package */ int tailLen; + private int count; + + public Encoder(int flags, byte[] output) { + this.output = output; + + do_padding = (flags & NO_PADDING) == 0; + do_newline = (flags & NO_WRAP) == 0; + do_cr = (flags & CRLF) != 0; + alphabet = ((flags & URL_SAFE) == 0) ? ENCODE : ENCODE_WEBSAFE; + + tail = new byte[2]; + tailLen = 0; + + count = do_newline ? LINE_GROUPS : -1; + } + + /** + * @return an overestimate for the number of bytes {@code + * len} bytes could encode to. + */ + public int maxOutputSize(int len) { + return len * 8 / 5 + 10; + } + + public boolean process(byte[] input, int offset, int len, boolean finish) { + // Using local variables makes the encoder about 9% faster. + final byte[] alphabet = this.alphabet; + final byte[] output = this.output; + int op = 0; + int count = this.count; + + int p = offset; + len += offset; + int v = -1; + + // First we need to concatenate the tail of the previous call + // with any input bytes available now and see if we can empty + // the tail. + + switch (tailLen) { + case 0: + // There was no tail. + break; + + case 1: + if (p + 2 <= len) { + // A 1-byte tail with at least 2 bytes of + // input available now. + v = ((tail[0] & 0xff) << 16) | + ((input[p++] & 0xff) << 8) | + (input[p++] & 0xff); + tailLen = 0; + } + ; + break; + + case 2: + if (p + 1 <= len) { + // A 2-byte tail with at least 1 byte of input. + v = ((tail[0] & 0xff) << 16) | + ((tail[1] & 0xff) << 8) | + (input[p++] & 0xff); + tailLen = 0; + } + break; + } + + if (v != -1) { + output[op++] = alphabet[(v >> 18) & 0x3f]; + output[op++] = alphabet[(v >> 12) & 0x3f]; + output[op++] = alphabet[(v >> 6) & 0x3f]; + output[op++] = alphabet[v & 0x3f]; + if (--count == 0) { + if (do_cr) output[op++] = '\r'; + output[op++] = '\n'; + count = LINE_GROUPS; + } + } + + // At this point either there is no tail, or there are fewer + // than 3 bytes of input available. + + // The main loop, turning 3 input bytes into 4 output bytes on + // each iteration. + while (p + 3 <= len) { + v = ((input[p] & 0xff) << 16) | + ((input[p + 1] & 0xff) << 8) | + (input[p + 2] & 0xff); + output[op] = alphabet[(v >> 18) & 0x3f]; + output[op + 1] = alphabet[(v >> 12) & 0x3f]; + output[op + 2] = alphabet[(v >> 6) & 0x3f]; + output[op + 3] = alphabet[v & 0x3f]; + p += 3; + op += 4; + if (--count == 0) { + if (do_cr) output[op++] = '\r'; + output[op++] = '\n'; + count = LINE_GROUPS; + } + } + + if (finish) { + // Finish up the tail of the input. 
Note that we need to + // consume any bytes in tail before any bytes + // remaining in input; there should be at most two bytes + // total. + + if (p - tailLen == len - 1) { + int t = 0; + v = ((tailLen > 0 ? tail[t++] : input[p++]) & 0xff) << 4; + tailLen -= t; + output[op++] = alphabet[(v >> 6) & 0x3f]; + output[op++] = alphabet[v & 0x3f]; + if (do_padding) { + output[op++] = '='; + output[op++] = '='; + } + if (do_newline) { + if (do_cr) output[op++] = '\r'; + output[op++] = '\n'; + } + } else if (p - tailLen == len - 2) { + int t = 0; + v = (((tailLen > 1 ? tail[t++] : input[p++]) & 0xff) << 10) | + (((tailLen > 0 ? tail[t++] : input[p++]) & 0xff) << 2); + tailLen -= t; + output[op++] = alphabet[(v >> 12) & 0x3f]; + output[op++] = alphabet[(v >> 6) & 0x3f]; + output[op++] = alphabet[v & 0x3f]; + if (do_padding) { + output[op++] = '='; + } + if (do_newline) { + if (do_cr) output[op++] = '\r'; + output[op++] = '\n'; + } + } else if (do_newline && op > 0 && count != LINE_GROUPS) { + if (do_cr) output[op++] = '\r'; + output[op++] = '\n'; + } + + assert tailLen == 0; + assert p == len; + } else { + // Save the leftovers in tail to be consumed on the next + // call to encodeInternal. + + if (p == len - 1) { + tail[tailLen++] = input[p]; + } else if (p == len - 2) { + tail[tailLen++] = input[p]; + tail[tailLen++] = input[p + 1]; + } + } + + this.op = op; + this.count = count; + + return true; + } + } +} diff --git a/platform-core/platform-common/src/main/java/org/sunbird/common/CryptoUtil.java b/platform-core/platform-common/src/main/java/org/sunbird/common/CryptoUtil.java new file mode 100644 index 000000000..6a794063d --- /dev/null +++ b/platform-core/platform-common/src/main/java/org/sunbird/common/CryptoUtil.java @@ -0,0 +1,25 @@ +package org.sunbird.common; + +import java.nio.charset.Charset; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; + +import javax.crypto.Mac; +import javax.crypto.spec.SecretKeySpec; + +public class CryptoUtil { + private static final Charset US_ASCII = Charset.forName("US-ASCII"); + + public static byte[] generateHMAC(String payLoad, String secretKey, String algorithm) { + Mac mac; + byte[] signature; + try { + mac = Mac.getInstance(algorithm); + mac.init(new SecretKeySpec(secretKey.getBytes(), algorithm)); + signature = mac.doFinal(payLoad.getBytes(US_ASCII)); + } catch (NoSuchAlgorithmException | InvalidKeyException e) { + return null; + } + return signature; + } +} diff --git a/platform-core/platform-common/src/main/java/org/sunbird/common/JWTUtil.java b/platform-core/platform-common/src/main/java/org/sunbird/common/JWTUtil.java new file mode 100644 index 000000000..9f948967f --- /dev/null +++ b/platform-core/platform-common/src/main/java/org/sunbird/common/JWTUtil.java @@ -0,0 +1,48 @@ +package org.sunbird.common; + +import java.util.HashMap; +import java.util.Map; + +import org.sunbird.common.exception.ServerException; + +public class JWTUtil { + private static String SEPARATOR = "."; + private static String JWT_SECRET_STRING = Platform.config.hasPath("content_security_jwt_secret") ? 
+ Platform.config.getString("content_security_jwt_secret"): "sunbird"; + + public static String createHS256Token(Map claimsMap) { + String token = ""; + JWTokenType tokenType = JWTokenType.HS256; + try { + Map headerOptions = new HashMap(); + String payLoad = createHeader(tokenType, headerOptions) + SEPARATOR + createClaimsMap(claimsMap); + String signature = encodeToBase64Uri( + CryptoUtil.generateHMAC(payLoad, JWT_SECRET_STRING, tokenType.getAlgorithmName())); + token = payLoad + SEPARATOR + signature; + } catch (Exception e) { + throw new ServerException("ERR_INVALID_HEADER_PARAM", "JWTUtil.createHS256Token :: Failed to create RS256 token. Err is : " + e.getMessage()); + } + return token; + } + + private static String createHeader(JWTokenType tokenType, Map headerOptions) throws Exception { + Map headerData = new HashMap<>(); + if (headerOptions != null) + headerData.putAll(headerOptions); + headerData.put("alg", tokenType.getTokenType()); + headerData.put("typ", "JWT"); + return encodeToBase64Uri(JsonUtils.serialize(headerData).getBytes()); + } + + private static String createClaimsMap(Map claimsMap) throws Exception { + Map payloadData = new HashMap<>(); + if (claimsMap != null && claimsMap.size() > 0) { + payloadData.putAll(claimsMap); + } + return encodeToBase64Uri(JsonUtils.serialize(payloadData).getBytes()); + } + + private static String encodeToBase64Uri(byte[] data) { + return Base64Util.encodeToString(data, 11); + } +} diff --git a/platform-core/platform-common/src/main/java/org/sunbird/common/JWTokenType.java b/platform-core/platform-common/src/main/java/org/sunbird/common/JWTokenType.java new file mode 100644 index 000000000..1d3adbf39 --- /dev/null +++ b/platform-core/platform-common/src/main/java/org/sunbird/common/JWTokenType.java @@ -0,0 +1,22 @@ +package org.sunbird.common; + +public enum JWTokenType { + HS256("HS256", "HmacSHA256"), + RS256("RS256", "SHA256withRSA"); + + private String algorithmName; + private String tokenType; + + JWTokenType(String tokenType, String algorithmName) { + this.algorithmName = algorithmName; + this.tokenType = tokenType; + } + + public String getAlgorithmName() { + return algorithmName; + } + + public String getTokenType() { + return tokenType; + } +} diff --git a/platform-modules/mimetype-manager/pom.xml b/platform-modules/mimetype-manager/pom.xml index eb25c58f1..a7a80d411 100644 --- a/platform-modules/mimetype-manager/pom.xml +++ b/platform-modules/mimetype-manager/pom.xml @@ -28,9 +28,9 @@ jar - org.sunbird - cloud-store-sdk - 1.3.0 + net.karmayogibharat + cloud-store-sdk_2.11 + 1.4.6 org.scala-lang diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/cloudstore/StorageService.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/cloudstore/StorageService.scala index 83fabffc8..942492479 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/cloudstore/StorageService.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/cloudstore/StorageService.scala @@ -20,36 +20,22 @@ class StorageService { @throws[Exception] def getService(): BaseStorageService = { if (null == storageService) { - if (StringUtils.equalsIgnoreCase(storageType, "azure")) { - val storageKey = Platform.config.getString("azure_storage_key") - val storageSecret = Platform.config.getString("azure_storage_secret") - storageService = StorageServiceFactory.getStorageService(new StorageConfig(storageType, storageKey, storageSecret)) - } else if (StringUtils.equalsIgnoreCase(storageType, "aws")) { - val 
storageKey = Platform.config.getString("aws_storage_key") - val storageSecret = Platform.config.getString("aws_storage_secret") - storageService = StorageServiceFactory.getStorageService(new StorageConfig(storageType, storageKey, storageSecret)) - } - else if (StringUtils.equalsIgnoreCase(storageType, "cephs3")) { - val storageKey = Platform.config.getString("cephs3_storage_key") - val storageSecret = Platform.config.getString("cephs3_storage_secret") - val endpoint = Platform.config.getString("cephs3_storage_endpoint") - storageService = StorageServiceFactory.getStorageService(new StorageConfig(storageType, storageKey, storageSecret, Option(endpoint))) - } - else throw new ServerException("ERR_INVALID_CLOUD_STORAGE", "Error while initialising cloud storage") + val storageKey = Platform.config.getString("cloud_storage_key") + val storageSecret = Platform.config.getString("cloud_storage_secret") + // TODO: endPoint defined to support "cephs3". Make code changes after cloud-store-sdk 2.11 support it. + val endPoint = if (Platform.config.hasPath("cloud_storage_endpoint")) Option(Platform.config.getString("cloud_storage_endpoint")) else None + println("StorageService --> params: " + storageType + "," + storageKey) + storageService = StorageServiceFactory.getStorageService(new StorageConfig(storageType, storageKey, storageSecret, endPoint)) } storageService } def getContainerName(): String = { - if (StringUtils.equalsIgnoreCase(storageType, "azure")) - Platform.config.getString("azure_storage_container") - else if (StringUtils.equalsIgnoreCase(storageType, "aws")) - Platform.config.getString("aws_storage_container") - else if (StringUtils.equalsIgnoreCase(storageType, "cephs3")) - Platform.config.getString("cephs3_storage_container") - else - throw new ServerException("ERR_INVALID_CLOUD_STORAGE", "Container name not configured.") + if(Platform.config.hasPath("cloud_storage_container")) + Platform.config.getString("cloud_storage_container") + else + throw new ServerException("ERR_INVALID_CLOUD_STORAGE", "Cloud Storage Container name not configured.") } def uploadFile(folderName: String, file: File, slug: Option[Boolean] = Option(true)): Array[String] = { diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/BaseMimeTypeManager.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/BaseMimeTypeManager.scala index c357443c5..416d75925 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/BaseMimeTypeManager.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/BaseMimeTypeManager.scala @@ -214,6 +214,20 @@ class BaseMimeTypeManager(implicit ss: StorageService) { } } + def extractPackageInCloudAsync(objectId: String, uploadFile: File, node: Node, extractionType: String, slugFile: Boolean)(implicit ec: ExecutionContext): Future[List[String]] = { + val file = Slug.createSlugFile(uploadFile) + val mimeType = node.getMetadata.get("mimeType").asInstanceOf[String] + validationForCloudExtraction(file, extractionType, mimeType) + if(extractableMimeTypes.contains(mimeType)){ + val extractionBasePath = getBasePath(objectId) + extractPackage(file, extractionBasePath) + ss.uploadDirectoryAsync(getExtractionPath(objectId, node, extractionType, mimeType), new File(extractionBasePath), Option(slugFile)) + } else { + val emptyFuture: Future[List[String]] = Future.successful(List.empty[String]) + emptyFuture + } + } + def extractH5PPackageInCloud(objectId: String, extractionBasePath: String, 
node: Node, extractionType: String, slugFile: Boolean)(implicit ec: ExecutionContext): Future[List[String]] = { val mimeType = node.getMetadata.get("mimeType").asInstanceOf[String] if(null == extractionType) diff --git a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/HtmlMimeTypeMgrImpl.scala b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/HtmlMimeTypeMgrImpl.scala index b977d3683..1ba192dec 100644 --- a/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/HtmlMimeTypeMgrImpl.scala +++ b/platform-modules/mimetype-manager/src/main/scala/org/sunbird/mimetype/mgr/impl/HtmlMimeTypeMgrImpl.scala @@ -9,6 +9,7 @@ import org.sunbird.graph.OntologyEngineContext import org.sunbird.graph.dac.model.Node import org.sunbird.mimetype.mgr.{BaseMimeTypeManager, MimeTypeManager} import org.sunbird.telemetry.logger.TelemetryManager +import org.sunbird.common.Platform import scala.concurrent.{ExecutionContext, Future} @@ -16,11 +17,14 @@ class HtmlMimeTypeMgrImpl(implicit ss: StorageService) extends BaseMimeTypeManag override def upload(objectId: String, node: Node, uploadFile: File, filePath: Option[String], params: UploadParams)(implicit ec: ExecutionContext): Future[Map[String, AnyRef]] = { validateUploadRequest(objectId, node, uploadFile) - if (isValidPackageStructure(uploadFile, List[String]("index.html"))) { + val indexHtmlValidation: Boolean = if (Platform.config.hasPath("indexHtmlValidation.env")) Platform.config.getBoolean("indexHtmlValidation.env") else true + TelemetryManager.log("Value of indexHtmlValidation: " + indexHtmlValidation) + val flag: Boolean = if (indexHtmlValidation) isValidPackageStructure(uploadFile, List[String]("index.html")) else true + if (flag) { val urls = uploadArtifactToCloud(uploadFile, objectId, filePath) node.getMetadata.put("s3Key", urls(IDX_S3_KEY)) node.getMetadata.put("artifactUrl", urls(IDX_S3_URL)) - extractPackageInCloud(objectId, uploadFile, node, "snapshot", false) + Future { extractPackageInCloudAsync(objectId, uploadFile, node, "snapshot", false) } Future(Map[String, AnyRef]("identifier" -> objectId, "artifactUrl" -> urls(IDX_S3_URL), "s3Key" -> urls(IDX_S3_KEY), "size" -> getFileSize(uploadFile).asInstanceOf[AnyRef])) } else { TelemetryManager.error("ERR_INVALID_FILE" + "Please Provide Valid File! 
with file name: " + uploadFile.getName) diff --git a/schemas/asset/1.0/schema.json b/schemas/asset/1.0/schema.json index 8aa9ad085..1b3ea5f0d 100644 --- a/schemas/asset/1.0/schema.json +++ b/schemas/asset/1.0/schema.json @@ -86,7 +86,8 @@ "audio/webm", "audio/x-wav", "audio/wav", - "application/json" + "application/json", + "application/vnd.openxmlformats-officedocument.presentationml.presentation" ] }, "osId": { @@ -1249,6 +1250,18 @@ "items": { "type": "string" } + }, + "taxonomyPaths_v2": { + "type": "array", + "items": { + "type": "object" + } + }, + "competencies_v3": { + "type": "array", + "items": { + "type": "object" + } } } } \ No newline at end of file diff --git a/schemas/collection/1.0/schema.json b/schemas/collection/1.0/schema.json index 112e93841..1e0ee9cbf 100644 --- a/schemas/collection/1.0/schema.json +++ b/schemas/collection/1.0/schema.json @@ -1135,7 +1135,8 @@ "type" : "string", "enum": [ "InReview", - "Reviewed" + "Reviewed", + "SentToPublish" ] }, "boardIds": { @@ -1286,6 +1287,24 @@ "items": { "type": "object" } + }, + "taxonomyPaths_v2": { + "type": "array", + "items": { + "type": "object" + } + }, + "competencies_v3": { + "type": "array", + "items": { + "type": "object" + } + }, + "sumOfTotalTatings": { + "type": "string" + }, + "totalNumberOfRatings": { + "type": "string" } } } diff --git a/schemas/content/1.0/schema.json b/schemas/content/1.0/schema.json index 73181a850..c9a9b4b02 100644 --- a/schemas/content/1.0/schema.json +++ b/schemas/content/1.0/schema.json @@ -87,7 +87,10 @@ "audio/webm", "audio/x-wav", "audio/wav", - "application/json" + "application/json", + "application/quiz", + "application/survey", + "application/vnd.openxmlformats-officedocument.presentationml.presentation" ] }, "osId": { @@ -1246,7 +1249,8 @@ "type" : "string", "enum": [ "InReview", - "Reviewed" + "Reviewed", + "SentToPublish" ] }, "boardIds": { @@ -1387,14 +1391,26 @@ "transcripts": { "type": "array", "items": { - "type": "object" + "type": "string" + }, + "taxonomyPaths_v2": { + "type": "array", + "items": { + "type": "object" + } + }, + "accessibility": { + "type": "array", + "items": { + "type": "object" + } + }, + "competencies_v3": { + "type": "array", + "items": { + "type": "object" + } } - }, - "accessibility": { - "type": "array", - "items": { - "type": "object" - } } } } diff --git a/schemas/event/1.0/config.json b/schemas/event/1.0/config.json index 52bdf3ba8..04b1fc8c3 100644 --- a/schemas/event/1.0/config.json +++ b/schemas/event/1.0/config.json @@ -55,7 +55,7 @@ "targetMediumIds", "targetTopicIds" ], - "version": "disable", + "version": "enable", "versionCheckMode": "OFF", "cacheEnabled": false, "schema_restrict_api": false diff --git a/schemas/event/1.0/schema.json b/schemas/event/1.0/schema.json index 69556357c..cfec01e57 100644 --- a/schemas/event/1.0/schema.json +++ b/schemas/event/1.0/schema.json @@ -36,7 +36,10 @@ "enum": [ "Draft", "Live", - "Retired" + "Retired", + "SentToPublish", + "Rejected", + "Cancelled" ], "default": "Draft" }, diff --git a/schemas/question/1.0/schema.json b/schemas/question/1.0/schema.json index 6054a39e3..ae0c3e9d3 100644 --- a/schemas/question/1.0/schema.json +++ b/schemas/question/1.0/schema.json @@ -425,7 +425,10 @@ "enum": [ "MCQ", "FTB", - "SA" + "SA", + "MCQ-MCA", + "MCQ-SCA", + "MTF" ] }, "scoringMode": { @@ -580,6 +583,17 @@ }, "originData": { "type": "object" + }, + "choices": { + "type": "object", + "description": "Choices which needs to be used in MCQ / MTF type question" + }, + "rhsChoices": { + "type": "array", + "items": { 
+ "type": "string" + }, + "description": "Choices which needs to be used in RHS of MTF type question" } }, "additionalProperties": false diff --git a/schemas/questionset/1.0/schema.json b/schemas/questionset/1.0/schema.json index fcc691a25..05674b5c1 100644 --- a/schemas/questionset/1.0/schema.json +++ b/schemas/questionset/1.0/schema.json @@ -656,6 +656,69 @@ }, "originData": { "type": "object" + }, + "trackable": { + "type": "object", + "properties": { + "enabled": { + "type": "string", + "enum": ["Yes","No"], + "default": "No" + }, + "autoBatch": { + "type": "string", + "enum": ["Yes","No"], + "default": "No" + } + }, + "default": { + "enabled": "No", + "autoBatch": "No" + }, + "additionalProperties": false + }, + "purpose": { + "type": "string" + }, + "scoreCutoffType": { + "type": "string", + "enum": [ + "AssessmentLevel", + "SectionLevel" + ], + "default": "AssessmentLevel" + }, + "subTitle": { + "type": "string" + }, + "minimumPassPercentage": { + "type": "number" + }, + "additionalKeywords": { + "type": "array", + "items": { + "type": "string" + } + }, + "additionalInstructions": { + "type": "string" + }, + "reviewStatus": { + "type": "string" + }, + "competencies_v3": { + "type": "array", + "items": { + "type": "object" + } + }, + "retakeAssessmentDuration": { + "type": "number", + "default": 5 + }, + "maxAssessmentRetakeAttempts": { + "type": "number", + "default": 5 } }, "additionalProperties": false diff --git a/scripts/definition-scripts/Course_assessment_QuestionSet.sh b/scripts/definition-scripts/Course_assessment_QuestionSet.sh new file mode 100644 index 000000000..170bd1d05 --- /dev/null +++ b/scripts/definition-scripts/Course_assessment_QuestionSet.sh @@ -0,0 +1,39 @@ +curl -L -X POST '{{host}}/object/category/definition/v4/create' \ +-H 'Content-Type: application/json' \ +--data-raw '{ + "request": { + "objectCategoryDefinition": { + "categoryId": "obj-cat:course-assessment", + "targetObjectType": "QuestionSet", + "objectMetadata": { + "config": {}, + "schema": { + "properties": { + "trackable": { + "type": "object", + "properties": { + "enabled": { + "type": "string", + "enum": [ + "Yes", + "No" + ], + "default": "No" + }, + "autoBatch": { + "type": "string", + "enum": [ + "Yes", + "No" + ], + "default": "No" + } + }, + "additionalProperties": false + } + } + } + } + } + } +}' \ No newline at end of file diff --git a/scripts/definition-scripts/Match_the_Following_Question.sh b/scripts/definition-scripts/Match_the_Following_Question.sh new file mode 100644 index 000000000..e76736227 --- /dev/null +++ b/scripts/definition-scripts/Match_the_Following_Question.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash +curl -L -X POST '{{host}}/object/category/definition/v4/create' \ +-H 'Content-Type: application/json' \ +--data-raw '{ + "request": { + "objectCategoryDefinition": { + "categoryId": "obj-cat:mtf-question", + "targetObjectType": "Question", + "objectMetadata": { + "config": {}, + "schema": {} + } + } + } +}' \ No newline at end of file diff --git a/scripts/definition-scripts/Practice_assessment_QuestionSet.sh b/scripts/definition-scripts/Practice_assessment_QuestionSet.sh new file mode 100644 index 000000000..d55cb4318 --- /dev/null +++ b/scripts/definition-scripts/Practice_assessment_QuestionSet.sh @@ -0,0 +1,40 @@ +curl -L -X POST '{{host}}/object/category/definition/v4/create' \ +-H 'Content-Type: application/json' \ +--data-raw '{ + "request": { + "objectCategoryDefinition": { + "categoryId": "obj-cat:practice-question-set", + "targetObjectType": "QuestionSet", + 
"objectMetadata": { + "config": {}, + "schema": { + "properties": { + "trackable": { + "type": "object", + "properties": { + "enabled": { + "type": "string", + "enum": [ + "Yes", + "No" + ], + "default": "No" + }, + "autoBatch": { + "type": "string", + "enum": ["Yes","No"], + "default": "No" + } + }, + "default": { + "enabled": "No", + "autoBatch": "No" + }, + "additionalProperties": false + } + } + } + } + } + } +}' \ No newline at end of file diff --git a/search-api/search-actors/src/main/java/org/sunbird/actors/SearchActor.java b/search-api/search-actors/src/main/java/org/sunbird/actors/SearchActor.java index 8dd9dcd92..ef6c76002 100644 --- a/search-api/search-actors/src/main/java/org/sunbird/actors/SearchActor.java +++ b/search-api/search-actors/src/main/java/org/sunbird/actors/SearchActor.java @@ -95,6 +95,17 @@ private SearchDTO getSearchDTO(Request request) throws Exception { SearchDTO searchObj = new SearchDTO(); try { Map req = request.getRequest(); + if (req.get("secureSettings") != null) { + searchObj.setSecureSettings((Boolean) req.get("secureSettings")); + } else { + searchObj.setSecureSettings(false); + } + if (req.get(SearchConstants.isSecureSettingsDisabled) != null) { + searchObj.setSecureSettingsDisabled((Boolean) req.get(SearchConstants.isSecureSettingsDisabled)); + } else { + searchObj.setSecureSettingsDisabled(false); + } + searchObj.setUserOrgId((String) request.getContext().get("x-user-channel-id")); TelemetryManager.log("Search Request: ", req); String queryString = (String) req.get(SearchConstants.query); int limit = getIntValue(req.get(SearchConstants.limit)); @@ -106,6 +117,7 @@ private SearchDTO getSearchDTO(Request request) throws Exception { wordChainsRequest = false; List properties = new ArrayList(); Map filters = (Map) req.get(SearchConstants.filters); + Map multiFilters = (Map) req.get("multiFilters"); if (null == filters) filters = new HashMap<>(); if (filters.containsKey("tags")) { @@ -118,6 +130,21 @@ private SearchDTO getSearchDTO(Request request) throws Exception { if (filters.containsKey("relatedBoards")) filters.remove("relatedBoards"); + Map secureSettingsFilter = new HashMap<>(); + for (String key : filters.keySet()) { + if (key.startsWith(SearchConstants.secureSettings)) { + secureSettingsFilter.put(key, filters.get(key)); + } + } + searchObj.setPostFilter(secureSettingsFilter); + if (MapUtils.isEmpty(searchObj.getPostFilter())) { + secureSettingsFilter.put(SearchConstants.secureSettingsOrganisation, searchObj.getUserOrgId()); + searchObj.setPostFilter(secureSettingsFilter); + } else { + for(String key: searchObj.getPostFilter().keySet()) { + filters.remove(key); + } + } Object objectTypeFromFilter = filters.get(SearchConstants.objectType); String objectType = null; if (objectTypeFromFilter != null) { @@ -239,6 +266,11 @@ private SearchDTO getSearchDTO(Request request) throws Exception { searchObj.setSortBy(sortBy); searchObj.setFacets(facets); searchObj.setProperties(properties); + if (multiFilters != null) { + List multiFilterProperties = new ArrayList(); + multiFilterProperties.addAll(getSearchFilterProperties(multiFilters, wordChainsRequest, request)); + searchObj.setMultiFilterProperties(multiFilterProperties); + } // Added Implicit Filter Properties To Support Collection content tagging to reuse by tenants. 
setImplicitFilters(filters, searchObj); searchObj.setLimit(limit); @@ -416,7 +448,14 @@ private List> getSearchFilterProperties(Map Object filterObject = entry.getValue(); if (filterObject instanceof Map) { Map filterMap = (Map) filterObject; - if (!filterMap.containsKey(SearchConstants.SEARCH_OPERATION_RANGE_MIN) + if (SearchConstants.must.equalsIgnoreCase(entry.getKey())) { + Map property = new HashMap(); + property.put(SearchConstants.values, entry.getValue()); + property.put(SearchConstants.propertyName, entry.getKey()); + property.put(SearchConstants.operation, + SearchConstants.SEARCH_OPERATION_EQUAL); + properties.add(property); + } else if (!filterMap.containsKey(SearchConstants.SEARCH_OPERATION_RANGE_MIN) && !filterMap.containsKey(SearchConstants.SEARCH_OPERATION_RANGE_MAX)) { for (Map.Entry filterEntry : filterMap.entrySet()) { Map property = new HashMap(); diff --git a/search-api/search-core/src/main/java/org/sunbird/search/client/ElasticSearchUtil.java b/search-api/search-core/src/main/java/org/sunbird/search/client/ElasticSearchUtil.java index cc031b378..6b3283d75 100644 --- a/search-api/search-core/src/main/java/org/sunbird/search/client/ElasticSearchUtil.java +++ b/search-api/search-core/src/main/java/org/sunbird/search/client/ElasticSearchUtil.java @@ -49,6 +49,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.Nested; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.sunbird.common.Platform; import org.sunbird.common.exception.ServerException; @@ -135,6 +136,11 @@ public static List getQuerySearchFields() { return querySearchFields; } + public static List getNonTextFields() { + List nonTextFields = Platform.config.getStringList("non.text.fields"); + return nonTextFields; + } + public List getDateFields() { List dateFields = Platform.config.getStringList("search.fields.date"); return dateFields; @@ -619,9 +625,16 @@ public static Object getCountFromAggregation(Aggregations aggregations, List aggregationsMap : groupByList) { Map parentCountMap = new HashMap(); String groupByParent = (String) aggregationsMap.get("groupByParent"); - Terms terms = aggregations.get(groupByParent); + Terms terms = null; + List buckets = null; + if (groupByParent.contains(".")) { + Nested nested = aggregations.get(groupByParent.split("\\.")[0]); + terms = nested.getAggregations().get(groupByParent.split("\\.")[1]); + } else { + terms = aggregations.get(groupByParent); + } + buckets = (List)terms.getBuckets(); List> parentGroupList = new ArrayList>(); - List buckets = (List) terms.getBuckets(); for (Bucket bucket : buckets) { Map parentCountObject = new HashMap(); parentCountObject.put("count", bucket.getDocCount()); diff --git a/search-api/search-core/src/main/java/org/sunbird/search/dto/SearchDTO.java b/search-api/search-core/src/main/java/org/sunbird/search/dto/SearchDTO.java index 71304a021..1d1392572 100644 --- a/search-api/search-core/src/main/java/org/sunbird/search/dto/SearchDTO.java +++ b/search-api/search-core/src/main/java/org/sunbird/search/dto/SearchDTO.java @@ -16,12 +16,15 @@ public class SearchDTO { private int limit; private int offset; boolean fuzzySearch = false; + boolean secureSettings = false; + String userOrgId = ""; private Map additionalProperties = new HashMap(); private Map softConstraints = new HashMap(); private List> 
aggregations = new ArrayList<>(); private List implicitFilterProperties; - - + private List multiFilterProperties; + boolean isSecureSettingsDisabled = false; + private Map postFilter = new HashMap<>(); public SearchDTO() { super(); @@ -65,13 +68,20 @@ public Map getSortBy() { public void setSortBy(Map sortBy) { this.sortBy = sortBy; } - public boolean isFuzzySearch() { return fuzzySearch; } public void setFuzzySearch(boolean fuzzySearch) { this.fuzzySearch = fuzzySearch; } + public boolean isSecureSettings() { + return secureSettings; + } + public void setSecureSettings(boolean secureSettings) { + this.secureSettings = secureSettings; + } + public String getUserOrgId() {return userOrgId;} + public void setUserOrgId(String userOrgId) {this.userOrgId = userOrgId;} public Map getAdditionalProperties() { return additionalProperties; } @@ -120,4 +130,28 @@ public List getImplicitFilterProperties() { public void setImplicitFilterProperties(List implicitFilterProperties) { this.implicitFilterProperties = implicitFilterProperties; } + + public List getMultiFilterProperties() { + return multiFilterProperties; + } + + public void setMultiFilterProperties(List multiFilterProperties) { + this.multiFilterProperties = multiFilterProperties; + } + + public boolean isSecureSettingsDisabled() { + return isSecureSettingsDisabled; + } + + public void setSecureSettingsDisabled(boolean secureSettingsDisabled) { + isSecureSettingsDisabled = secureSettingsDisabled; + } + + public Map getPostFilter() { + return postFilter; + } + + public void setPostFilter(Map postFilter) { + this.postFilter = postFilter; + } } diff --git a/search-api/search-core/src/main/java/org/sunbird/search/processor/SearchProcessor.java b/search-api/search-core/src/main/java/org/sunbird/search/processor/SearchProcessor.java index 5d163638d..1c1eece78 100644 --- a/search-api/search-core/src/main/java/org/sunbird/search/processor/SearchProcessor.java +++ b/search-api/search-core/src/main/java/org/sunbird/search/processor/SearchProcessor.java @@ -4,25 +4,14 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; import org.apache.commons.lang.StringUtils; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.lucene.search.function.CombineFunction; -import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.ScoreMode; -import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.*; import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type; -import org.elasticsearch.index.query.Operator; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.RangeQueryBuilder; -import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; -import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder; -import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.*; import 
org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -38,11 +27,7 @@ import scala.concurrent.ExecutionContext; import scala.concurrent.Future; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.stream.Collectors; public class SearchProcessor { @@ -56,7 +41,7 @@ public SearchProcessor() { ElasticSearchUtil.initialiseESClient(SearchConstants.COMPOSITE_SEARCH_INDEX, Platform.config.getString("search.es_conn_info")); } - + public SearchProcessor(String indexName) { } @@ -65,9 +50,31 @@ public Future> processSearch(SearchDTO searchDTO, boolean in throws Exception { List> groupByFinalList = new ArrayList>(); SearchSourceBuilder query = processSearchQuery(searchDTO, groupByFinalList, true); - Future searchResponse = ElasticSearchUtil.search( - SearchConstants.COMPOSITE_SEARCH_INDEX, - query); + + Future searchResponse = null; + boolean enableFuzzyWhenNoResults = Platform.config.hasPath("search.fields.enable.fuzzy.when.noresult") && + Platform.config.getBoolean("search.fields.enable.fuzzy.when.noresult"); + if (enableFuzzyWhenNoResults) { + //Let's call with Default fuzzy value given in request + int exactMatchCount = ElasticSearchUtil.count(SearchConstants.COMPOSITE_SEARCH_INDEX, query); + + //If no results and fuzzy was false then set fuzzy to true + //If no results when fuzzy was true, return the same. + if (exactMatchCount == 0 && !searchDTO.isFuzzySearch()) { + searchDTO.setFuzzySearch(true); + groupByFinalList.clear(); + query = processSearchQuery(searchDTO, groupByFinalList, true); + } + } + + if (searchDTO.isSecureSettingsDisabled()) { + BoolQueryBuilder mainQuery = QueryBuilders.boolQuery(); + mainQuery.must(query.query()); // Preserve the existing query + mainQuery.filter(getPostFilterQuery(searchDTO.getPostFilter())); // Apply filtering for aggregations + query.query(mainQuery); + } + + searchResponse = ElasticSearchUtil.search(SearchConstants.COMPOSITE_SEARCH_INDEX, query); return searchResponse.map(new Mapper>() { public Map apply(SearchResponse searchResult) { @@ -112,7 +119,7 @@ public Map processCount(SearchDTO searchDTO) throws Exception { /** * Returns the list of words which are synonyms of the synsetIds passed in the * request - * + * * @param synsetIds * @return * @throws Exception @@ -177,7 +184,7 @@ public Map multiWordDocSearch(List synsetIds) throws Exc /** * Returns list of synsetsIds which has valid documents in composite index - * + * * @param synsetIds * @return * @throws Exception @@ -240,6 +247,9 @@ private SearchSourceBuilder processSearchQuery(SearchDTO searchDTO, List> groupByList, SearchSourceBuilder searchSourceBuilder) { TermsAggregationBuilder termBuilder = null; + List nonTextFields = ElasticSearchUtil.getNonTextFields(); if (groupByList != null && !groupByList.isEmpty()) { + HashMap> nestedAggregation = new HashMap<>(); for (Map groupByMap : groupByList) { String groupByParent = (String) groupByMap.get("groupByParent"); - termBuilder = AggregationBuilders.terms(groupByParent) - .field(groupByParent + SearchConstants.RAW_FIELD_EXTENSION) - .size(ElasticSearchUtil.defaultResultLimit); + if (!groupByParent.contains(".")) { + if (nonTextFields.contains(groupByParent)) { + termBuilder = AggregationBuilders.terms(groupByParent) + .field(groupByParent) + .size(ElasticSearchUtil.defaultResultLimit); + 
}else { + termBuilder = AggregationBuilders.terms(groupByParent) + .field(groupByParent + SearchConstants.RAW_FIELD_EXTENSION) + .size(ElasticSearchUtil.defaultResultLimit); + } List groupByChildList = (List) groupByMap.get("groupByChildList"); if (groupByChildList != null && !groupByChildList.isEmpty()) { for (String childGroupBy : groupByChildList) { @@ -290,6 +309,27 @@ private void setAggregations(List> groupByList, } } searchSourceBuilder.aggregation(termBuilder); + } else { + if (nestedAggregation.get(groupByParent.split("\\.")[0]) != null) { + nestedAggregation.get(groupByParent.split("\\.")[0]).add(groupByParent.split("\\.")[1]); + } else { + List nestedAggrList = new ArrayList<>(); + nestedAggrList.add(groupByParent.split("\\.")[1]); + nestedAggregation.put(groupByParent.split("\\.")[0], nestedAggrList); + } + } + } + if (!nestedAggregation.isEmpty()) { + for (Map.Entry> mapData : nestedAggregation.entrySet()) { + AggregationBuilder nestedAggregationBuilder = AggregationBuilders.nested(mapData.getKey(), mapData.getKey()); + for (String nestedValue : mapData.getValue()) { + termBuilder = AggregationBuilders.terms(nestedValue) + .field(mapData.getKey() + "." + nestedValue + SearchConstants.RAW_FIELD_EXTENSION) + .size(ElasticSearchUtil.defaultResultLimit); + nestedAggregationBuilder.subAggregation(termBuilder); + } + searchSourceBuilder.aggregation(nestedAggregationBuilder); + } } } } @@ -304,9 +344,55 @@ private QueryBuilder prepareSearchQuery(SearchDTO searchDTO) { QueryBuilder queryBuilder = null; String totalOperation = searchDTO.getOperation(); List properties = searchDTO.getProperties(); + if (searchDTO.isSecureSettingsDisabled()) { + formQueryImpl(properties, queryBuilder, boolQuery, totalOperation, searchDTO.isFuzzySearch(), searchDTO); + } else { + if (searchDTO.isSecureSettings() == false) + formQuery(properties, queryBuilder, boolQuery, totalOperation, searchDTO.isFuzzySearch()); + else + formQueryImpl(properties, queryBuilder, boolQuery, totalOperation, searchDTO.isFuzzySearch(), searchDTO); + } + + if (searchDTO.getMultiFilterProperties() != null) { + if (searchDTO.isSecureSettingsDisabled()) { + formQueryImpl(searchDTO.getMultiFilterProperties(), queryBuilder, boolQuery, totalOperation, searchDTO.isFuzzySearch(), searchDTO); + } else { + if (searchDTO.isSecureSettings() == false) + formQuery(searchDTO.getMultiFilterProperties(), queryBuilder, boolQuery, SearchConstants.SEARCH_OPERATION_OR, searchDTO.isFuzzySearch()); + else { + formQueryImpl(searchDTO.getMultiFilterProperties(), queryBuilder, boolQuery, totalOperation, searchDTO.isFuzzySearch(), searchDTO); + } + } + } + Map softConstraints = searchDTO.getSoftConstraints(); + if (null != softConstraints && !softConstraints.isEmpty()) { + boolQuery.should(getSoftConstraintQuery(softConstraints)); + searchDTO.setSortBy(null); + // relevanceSort = true; + } + return boolQuery; + } + + private void formQuery(List properties, QueryBuilder queryBuilder, BoolQueryBuilder boolQuery, String operation, Boolean fuzzy) { + formQueryImpl(properties, queryBuilder, boolQuery, operation, fuzzy, null); + } + + private void formQueryImpl(List properties, QueryBuilder queryBuilder, BoolQueryBuilder boolQuery, String operation, Boolean fuzzy, SearchDTO searchDTO) { + boolean enableSecureSettings = false; + boolean disableSecureSettings = false; + if (searchDTO != null) { + enableSecureSettings = searchDTO.isSecureSettings(); + disableSecureSettings = searchDTO.isSecureSettingsDisabled(); + } for (Map property : properties) { String 
opertation = (String) property.get("operation"); + Object objValues = property.get("values"); + Map valuesMap = new HashMap<>(); + if (objValues instanceof Map) { + valuesMap = (Map) property.get("values"); + } + List values; try { values = (List) property.get("values"); @@ -320,17 +406,29 @@ private QueryBuilder prepareSearchQuery(SearchDTO searchDTO) { if (propertyName.equals("*")) { relevanceSort = true; propertyName = "all_fields"; - queryBuilder = getAllFieldsPropertyQuery(values); + queryBuilder = getAllFieldsPropertyQuery(values, fuzzy); + if (enableSecureSettings) { + boolQuery.must(getSecureSettingsSearchQuery(searchDTO.getUserOrgId())); + } else { + if (!disableSecureSettings) { + boolQuery.mustNot(getSecureSettingsSearchDefaultQuery()); + } + } boolQuery.must(queryBuilder); continue; } - - propertyName = propertyName + SearchConstants.RAW_FIELD_EXTENSION; - + List nonTextFields = ElasticSearchUtil.getNonTextFields(); + if (!nonTextFields.contains(propertyName)) { + propertyName = propertyName + SearchConstants.RAW_FIELD_EXTENSION; + } switch (opertation) { case SearchConstants.SEARCH_OPERATION_EQUAL: { - queryBuilder = getMustTermQuery(propertyName, values, true); - queryBuilder = checkNestedProperty(queryBuilder, propertyName); + if (MapUtils.isNotEmpty(valuesMap)) { + queryBuilder = getMustTermQuery(valuesMap, true); + } else { + queryBuilder = getMustTermQuery(propertyName, values, true); + queryBuilder = checkNestedProperty(queryBuilder, propertyName); + } break; } case SearchConstants.SEARCH_OPERATION_NOT_EQUAL: { @@ -408,21 +506,20 @@ private QueryBuilder prepareSearchQuery(SearchDTO searchDTO) { break; } } - if (totalOperation.equalsIgnoreCase(AND)) { + if (operation.equalsIgnoreCase(AND)) { + if (enableSecureSettings) { + boolQuery.must(getSecureSettingsSearchQuery(searchDTO.getUserOrgId())); + } else { + if (!disableSecureSettings) { + boolQuery.mustNot(getSecureSettingsSearchDefaultQuery()); + } + } boolQuery.must(queryBuilder); } else { boolQuery.should(queryBuilder); } } - - Map softConstraints = searchDTO.getSoftConstraints(); - if (null != softConstraints && !softConstraints.isEmpty()) { - boolQuery.should(getSoftConstraintQuery(softConstraints)); - searchDTO.setSortBy(null); - // relevanceSort = true; - } - return boolQuery; } private QueryBuilder checkNestedProperty(QueryBuilder queryBuilder, String propertyName) { @@ -432,145 +529,12 @@ private QueryBuilder checkNestedProperty(QueryBuilder queryBuilder, String prope return queryBuilder; } - /** - * @param searchDTO - * @return - */ - @SuppressWarnings({ "unchecked", "rawtypes" }) - private QueryBuilder prepareFilteredSearchQuery(SearchDTO searchDTO) { - List filterFunctionBuilder = new ArrayList<>(); - - Map weightages = (Map) searchDTO.getAdditionalProperty("weightagesMap"); - if (weightages == null) { - weightages = new HashMap(); - weightages.put("default_weightage", 1.0f); - } - List querySearchFeilds = ElasticSearchUtil.getQuerySearchFields(); - List properties = searchDTO.getProperties(); - for (Map property : properties) { - String opertation = (String) property.get("operation"); - - List values; - try { - values = (List) property.get("values"); - } catch (Exception e) { - values = Arrays.asList(property.get("values")); - } - - values = values.stream().filter(value -> (null != value)).collect(Collectors.toList()); - String propertyName = (String) property.get("propertyName"); - if (propertyName.equals("*")) { - relevanceSort = true; - propertyName = "all_fields"; - filterFunctionBuilder - .add(new 
FilterFunctionBuilder(getAllFieldsPropertyQuery(values), - ScoreFunctionBuilders.weightFactorFunction(weightages.get("default_weightage")))); - continue; - } - - propertyName = propertyName + SearchConstants.RAW_FIELD_EXTENSION; - float weight = getweight(querySearchFeilds, propertyName); - switch (opertation) { - case SearchConstants.SEARCH_OPERATION_EQUAL: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getMustTermQuery(propertyName, values, true), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_NOT_EQUAL: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getMustTermQuery(propertyName, values, true), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_ENDS_WITH: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getRegexQuery(propertyName, values), ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_LIKE: - case SearchConstants.SEARCH_OPERATION_CONTAINS: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getMatchPhraseQuery(propertyName, values, true), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_NOT_LIKE: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getMatchPhraseQuery(propertyName, values, false), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_STARTS_WITH: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getMatchPhrasePrefixQuery(propertyName, values), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_EXISTS: { - filterFunctionBuilder.add( - new FilterFunctionBuilder(getExistsQuery(propertyName, values, true), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_NOT_EXISTS: { - filterFunctionBuilder.add( - new FilterFunctionBuilder(getExistsQuery(propertyName, values, false), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_NOT_IN: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getNotInQuery(propertyName, values), ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_GREATER_THAN: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getRangeQuery(propertyName, values, SearchConstants.SEARCH_OPERATION_GREATER_THAN), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_GREATER_THAN_EQUALS: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getRangeQuery(propertyName, values, - SearchConstants.SEARCH_OPERATION_GREATER_THAN_EQUALS), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_LESS_THAN: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getRangeQuery(propertyName, values, SearchConstants.SEARCH_OPERATION_LESS_THAN), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_LESS_THAN_EQUALS: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - getRangeQuery(propertyName, values, SearchConstants.SEARCH_OPERATION_LESS_THAN_EQUALS), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - case SearchConstants.SEARCH_OPERATION_AND: { - filterFunctionBuilder.add(new FilterFunctionBuilder( - 
getAndQuery(propertyName, values), - ScoreFunctionBuilders.weightFactorFunction(weight))); - break; - } - } - } - - FunctionScoreQueryBuilder queryBuilder = QueryBuilders - .functionScoreQuery( - filterFunctionBuilder.toArray(new FilterFunctionBuilder[filterFunctionBuilder.size()])) - .boostMode(CombineFunction.REPLACE).scoreMode(ScoreMode.SUM); - return queryBuilder; - - } private QueryBuilder getAndQuery(String propertyName, List values) { BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); for (Object value : values) { queryBuilder.must( - QueryBuilders.matchQuery(propertyName, value).operator(Operator.AND)); + QueryBuilders.matchQuery(propertyName, value).operator(Operator.AND).fuzzyTranspositions(false)); } return queryBuilder; } @@ -597,7 +561,7 @@ private float getweight(List querySearchFeilds, String propertyName) { * @param values * @return */ - private QueryBuilder getAllFieldsPropertyQuery(List values) { + private QueryBuilder getAllFieldsPropertyQuery(List values, Boolean fuzzy) { List queryFields = ElasticSearchUtil.getQuerySearchFields(); Map queryFieldsMap = new HashMap<>(); for (String field : queryFields) { @@ -608,14 +572,36 @@ private QueryBuilder getAllFieldsPropertyQuery(List values) { } BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); for (Object value : values) { - queryBuilder - .should(QueryBuilders.multiMatchQuery(value).fields(queryFieldsMap) - .operator(Operator.AND).type(Type.CROSS_FIELDS).lenient(true)); + if (fuzzy) { + queryBuilder + .should(QueryBuilders.multiMatchQuery(value).fields(queryFieldsMap) + .operator(Operator.AND).fuzziness("AUTO").lenient(true)); + } else { + queryBuilder + .should(QueryBuilders.multiMatchQuery(value).fields(queryFieldsMap) + .operator(Operator.AND).type(Type.CROSS_FIELDS).fuzzyTranspositions(false).lenient(true)); + } } - return queryBuilder; } + private QueryBuilder getSecureSettingsSearchDefaultQuery() { + + QueryBuilder firstNestedQuery =new NestedQueryBuilder("secureSettings", + QueryBuilders.boolQuery() .mustNot(new ExistsQueryBuilder("organisation")), org.apache.lucene.search.join.ScoreMode.None); + QueryBuilder secondNestedQuery= new NestedQueryBuilder("secureSettings", QueryBuilders.boolQuery() + .filter(new RangeQueryBuilder("organisation" + ".length").lte(0)) , org.apache.lucene.search.join.ScoreMode.None); + QueryBuilder query = QueryBuilders.boolQuery() .should(firstNestedQuery).should (secondNestedQuery); + + return query; + } + + private QueryBuilder getSecureSettingsSearchQuery(String org_id) { + QueryBuilder query = new NestedQueryBuilder("secureSettings", + QueryBuilders.boolQuery().must(new ExistsQueryBuilder("secureSettings.organisation")).must(QueryBuilders.termQuery("secureSettings.organisation", org_id)), org.apache.lucene.search.join.ScoreMode.None); + + return query; + } /** * @param softConstraints * @return @@ -630,13 +616,13 @@ private QueryBuilder getSoftConstraintQuery(Map softConstraints) for(Object value: dataList) { queryBuilder .should(QueryBuilders.matchQuery(key + SearchConstants.RAW_FIELD_EXTENSION, value) - .boost(Integer.valueOf((int) data.get(0)).floatValue())); + .boost(Integer.valueOf((int) data.get(0)).floatValue()).fuzzyTranspositions(false)); } } else { queryBuilder.should( QueryBuilders.matchQuery(key + SearchConstants.RAW_FIELD_EXTENSION, data.get(1)) - .boost(Integer.valueOf((int) data.get(0)).floatValue())); + .boost(Integer.valueOf((int) data.get(0)).floatValue()).fuzzyTranspositions(false)); } } return queryBuilder; @@ -648,33 +634,28 @@ private QueryBuilder 
getSoftConstraintQuery(Map softConstraints) * @return */ private QueryBuilder getRangeQuery(String propertyName, List values, String operation) { - BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); + RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(propertyName); for (Object value : values) { switch (operation) { - case SearchConstants.SEARCH_OPERATION_GREATER_THAN: { - queryBuilder.should(QueryBuilders - .rangeQuery(propertyName).gt(value)); - break; - } - case SearchConstants.SEARCH_OPERATION_GREATER_THAN_EQUALS: { - queryBuilder.should(QueryBuilders - .rangeQuery(propertyName).gte(value)); - break; - } - case SearchConstants.SEARCH_OPERATION_LESS_THAN: { - queryBuilder.should(QueryBuilders - .rangeQuery(propertyName).lt(value)); - break; - } - case SearchConstants.SEARCH_OPERATION_LESS_THAN_EQUALS: { - queryBuilder.should(QueryBuilders - .rangeQuery(propertyName).lte(value)); - break; - } + case SearchConstants.SEARCH_OPERATION_GREATER_THAN: { + rangeQuery.gt(value); + break; + } + case SearchConstants.SEARCH_OPERATION_GREATER_THAN_EQUALS: { + rangeQuery.gte(value); + break; + } + case SearchConstants.SEARCH_OPERATION_LESS_THAN: { + rangeQuery.lt(value); + break; + } + case SearchConstants.SEARCH_OPERATION_LESS_THAN_EQUALS: { + rangeQuery.lte(value); + break; + } } } - - return queryBuilder; + return rangeQuery; } /** @@ -769,10 +750,10 @@ private QueryBuilder getMustTermQuery(String propertyName, List values, for (Object value : values) { if (match) { queryBuilder.should( - QueryBuilders.matchQuery(propertyName, value)); + QueryBuilders.matchQuery(propertyName, value).fuzzyTranspositions(false)); } else { queryBuilder.mustNot( - QueryBuilders.matchQuery(propertyName, value)); + QueryBuilders.matchQuery(propertyName, value).fuzzyTranspositions(false)); } } @@ -829,7 +810,7 @@ public Future> processSearchQuery(SearchDTO searchDTO, boolean incl SearchSourceBuilder query = processSearchQuery(searchDTO, groupByFinalList, sort); TelemetryManager.log(" search query: " + query); Future searchResponse = ElasticSearchUtil.search(index, query); - + return searchResponse.map(new Mapper>() { public List apply(SearchResponse searchResult) { List response = new ArrayList(); @@ -843,7 +824,7 @@ public List apply(SearchResponse searchResult) { return response; } }, ExecutionContext.Implicits$.MODULE$.global()); - + } public Future processSearchQueryWithSearchResult(SearchDTO searchDTO, boolean includeResults, @@ -935,8 +916,62 @@ private QueryBuilder getSearchQuery(SearchDTO searchDTO) { } private QueryBuilder getQuery(SearchDTO searchDTO) { - return searchDTO.isFuzzySearch() ? 
prepareFilteredSearchQuery(searchDTO) : prepareSearchQuery(searchDTO); + return prepareSearchQuery(searchDTO); } + private static QueryBuilder getPostFilterQuery(Map postFilter) { + // Creating the post_filter bool query + BoolQueryBuilder postFilterBoolQuery = QueryBuilders.boolQuery(); + BoolQueryBuilder nestedBoolQuery = QueryBuilders.boolQuery(); + + for (Map.Entry filters : postFilter.entrySet()) { + if (filters.getValue() instanceof List) { + for (String value : (List) filters.getValue()) { + nestedBoolQuery.must(QueryBuilders.termQuery(filters.getKey(), value)); + } + } else if (filters.getValue() instanceof String) { + nestedBoolQuery.must(QueryBuilders.termQuery(filters.getKey(), ((String) filters.getValue()).toLowerCase())); + } + } + NestedQueryBuilder nestedQuery = QueryBuilders.nestedQuery( + "secureSettings", + nestedBoolQuery, + org.apache.lucene.search.join.ScoreMode.None + ); + postFilterBoolQuery.should(nestedQuery); + // Nested query for "secureSettings" with must_not exists + BoolQueryBuilder mustNotBoolQuery = QueryBuilders.boolQuery(); + NestedQueryBuilder nestedMustNotQuery = QueryBuilders.nestedQuery( + "secureSettings", + new ExistsQueryBuilder("secureSettings.organisation") + .boost(1.0f), + org.apache.lucene.search.join.ScoreMode.None + ); + mustNotBoolQuery.mustNot(nestedMustNotQuery); + postFilterBoolQuery.should(mustNotBoolQuery); + + return postFilterBoolQuery; + } + + private QueryBuilder getMustTermQuery(Map propertyMap, boolean match) { + BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); + Set> entrySet = propertyMap.entrySet(); + for (Map.Entry entry : entrySet) { + String propertyName = (String) entry.getKey(); + propertyName = propertyName + SearchConstants.RAW_FIELD_EXTENSION; + List valueList = (List) entry.getValue(); + for (Object value : valueList) { + if (match) { + queryBuilder.should( + QueryBuilders.matchQuery(propertyName, value).fuzzyTranspositions(false)); + } else { + queryBuilder.mustNot( + QueryBuilders.matchQuery(propertyName, value).fuzzyTranspositions(false)); + } + } + } + + return queryBuilder; + } } \ No newline at end of file diff --git a/search-api/search-core/src/main/java/org/sunbird/search/util/SearchConstants.java b/search-api/search-core/src/main/java/org/sunbird/search/util/SearchConstants.java index 27b68a8ba..72599abea 100644 --- a/search-api/search-core/src/main/java/org/sunbird/search/util/SearchConstants.java +++ b/search-api/search-core/src/main/java/org/sunbird/search/util/SearchConstants.java @@ -114,4 +114,8 @@ public class SearchConstants { public static final String softConstraints = "softConstraints"; public static final String setDefaultVisibility = "setDefaultVisibility"; public static String soft = "soft"; + public static String secureSettings = "secureSettings"; + public static String isSecureSettingsDisabled = "isSecureSettingsDisabled"; + public static String secureSettingsOrganisation = "secureSettings.organisation"; + public static final String must = "must"; } diff --git a/search-api/search-service/app/controllers/SearchBaseController.scala b/search-api/search-service/app/controllers/SearchBaseController.scala index 9e1735b35..5938c93ac 100644 --- a/search-api/search-service/app/controllers/SearchBaseController.scala +++ b/search-api/search-service/app/controllers/SearchBaseController.scala @@ -25,7 +25,7 @@ abstract class SearchBaseController(protected val cc: ControllerComponents)(impl } def commonHeaders()(implicit request: Request[AnyContent]): java.util.Map[String, Object] = { - val 
customHeaders = Map("x-authenticated-user-orgid" -> "x-user-channel-id","x-channel-id" -> "CHANNEL_ID", "x-consumer-id" -> "CONSUMER_ID", "x-app-id" -> "APP_ID", "x-session-id" -> "SESSION_ID", "x-device-id" -> "DEVICE_ID") val headers = request.headers.headers.groupBy(_._1).mapValues(_.map(_._2)) val appHeaders = headers.filter(header => customHeaders.keySet.contains(header._1.toLowerCase)) .map(entry => (customHeaders.get(entry._1.toLowerCase()).get, entry._2.head)) diff --git a/search-api/search-service/app/controllers/SearchController.scala b/search-api/search-service/app/controllers/SearchController.scala index e76bcc5e3..451e7c33c 100644 --- a/search-api/search-service/app/controllers/SearchController.scala +++ b/search-api/search-service/app/controllers/SearchController.scala @@ -57,4 +57,25 @@ class SearchController @Inject()(@Named(ActorNames.SEARCH_ACTOR) searchActor: Ac setHeaderContext(internalReq) getResult(mgr.count(internalReq, searchActor), ApiId.APPLICATION_COUNT) } + + def searchV4() = loggingAction.async { implicit request => + val internalReq = getRequest(ApiId.APPLICATION_SEARCH) + val requestMap: java.util.Map[String, Any] = internalReq.getRequest.asInstanceOf[util.Map[String, Any]] + requestMap.put(SearchConstants.isSecureSettingsDisabled, true) + setHeaderContext(internalReq) + val filters = internalReq.getRequest.getOrDefault(SearchConstants.filters, new java.util.HashMap()).asInstanceOf[java.util.Map[String, Object]] + val visibilityObject = filters.getOrDefault("visibility","") + var visibility:util.List[String] = null + if (visibilityObject != null) { + if (visibilityObject.isInstanceOf[util.ArrayList[_]]) visibility = visibilityObject.asInstanceOf[util.ArrayList[String]] + else if (visibilityObject.isInstanceOf[String]) visibility = util.Arrays.asList(visibilityObject.asInstanceOf[String]) + } + if (visibility.contains("Private")) { + getErrorResponse(ApiId.APPLICATION_SEARCH, apiVersion, SearchConstants.ERR_ACCESS_DENIED, "Cannot access private content through public search api") + } + else { + internalReq.getContext.put(SearchConstants.setDefaultVisibility, "true") + getResult(mgr.search(internalReq, searchActor), ApiId.APPLICATION_SEARCH) + } + } } diff --git a/search-api/search-service/conf/application.conf b/search-api/search-service/conf/application.conf index a803688db..3dd0e8627 100644 --- a/search-api/search-service/conf/application.conf +++ b/search-api/search-service/conf/application.conf @@ -312,4 +312,12 @@ ekstepPlatformApiUserId="search-service" content.tagging.property=["subject","medium"] -search.payload.log_enable=true \ No newline at end of file +search.payload.log_enable=true + +#Following configuration would enable the fuzzy search when there are no matches found for the given query. 
+search.fields.enable.fuzzy.when.noresult=false + +#Following configuration would enable the secureSettings search +search.fields.enable.secureSettings=false + +non.text.fields=["startDateTimeInEpoch","endDateTimeInEpoch"] \ No newline at end of file diff --git a/search-api/search-service/conf/routes b/search-api/search-service/conf/routes index 44e6c9ad5..5a39e14a6 100644 --- a/search-api/search-service/conf/routes +++ b/search-api/search-service/conf/routes @@ -10,4 +10,5 @@ POST /v3/private/search controllers.SearchController.privateSearch() POST /v2/search/count controllers.SearchController.count() POST /v3/count controllers.SearchController.count() #POST /v2/metrics controllers.MetricsController.search() -#POST /v3/metrics controllers.MetricsController.search() \ No newline at end of file +#POST /v3/metrics controllers.MetricsController.search() +POST /v4/search controllers.SearchController.searchV4() \ No newline at end of file
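
The new POST /v4/search route above forces isSecureSettingsDisabled on the incoming request and rejects any filter that asks for Private visibility. Below is a minimal sketch of a call against that route, written in the same curl style as the definition scripts added in this change; {{host}} and the filter values are placeholders, not values taken from this diff.

#!/usr/bin/env bash
# Sketch only: exercises the new public search route (POST /v4/search).
# {{host}} and the filter values are illustrative placeholders.
curl -L -X POST '{{host}}/v4/search' \
-H 'Content-Type: application/json' \
--data-raw '{
  "request": {
    "filters": {
      "objectType": "Content",
      "status": ["Live"]
    },
    "limit": 10
  }
}'

A request whose filters include "visibility": "Private" is expected to receive the ERR_ACCESS_DENIED error produced in SearchController.searchV4 instead of search results.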
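
The multiFilters block read in SearchActor.getSearchDTO, the secureSettings flag on SearchDTO, and the x-authenticated-user-orgid header mapped in SearchBaseController could be combined as sketched below. This is an assumption-heavy example: the route path (/v3/search), the organisation id, and the filter keys are illustrative only; the request keys secureSettings, filters, multiFilters, and facets are the ones introduced or handled in this change.

#!/usr/bin/env bash
# Sketch only: a secure-settings aware search. The org id header is mapped to
# x-user-channel-id in the request context and picked up as userOrgId for the
# nested secureSettings query; multiFilters entries are parsed by
# getSearchFilterProperties and added as extra query properties; the
# startDateTimeInEpoch facet is aggregated without the .raw suffix because the
# field is listed in the new non.text.fields configuration.
curl -L -X POST '{{host}}/v3/search' \
-H 'Content-Type: application/json' \
-H 'x-authenticated-user-orgid: {{org_id}}' \
--data-raw '{
  "request": {
    "secureSettings": true,
    "filters": {
      "objectType": "Event",
      "status": ["Live"]
    },
    "multiFilters": {
      "board": ["CBSE"],
      "medium": ["English"]
    },
    "facets": ["startDateTimeInEpoch"]
  }
}'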