import static com.google.common.base.Preconditions.checkNotNull;
import static org.opendaylight.yangtools.yang.common.YangConstants.RFC6020_YANG_FILE_EXTENSION;
-import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableSet;
import io.micrometer.core.annotation.Timed;
import jakarta.transaction.Transactional;
-import java.io.ByteArrayInputStream;
import java.io.IOException;
-import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import org.onap.cps.ri.repository.SchemaSetRepository;
import org.onap.cps.ri.repository.YangResourceRepository;
import org.onap.cps.spi.CpsModulePersistenceService;
+import org.onap.cps.yang.YangTextSchemaSourceSetBuilder;
import org.opendaylight.yangtools.yang.common.Revision;
import org.opendaylight.yangtools.yang.model.repo.api.RevisionSourceIdentifier;
import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource;
final RevisionSourceIdentifier revisionSourceIdentifier =
createIdentifierFromSourceName(checkNotNull(sourceName));
- final YangTextSchemaSource tempYangTextSchemaSource = new YangTextSchemaSource(revisionSourceIdentifier) {
- @Override
- public Optional<String> getSymbolicName() {
- return Optional.empty();
- }
-
- @Override
- protected MoreObjects.ToStringHelper addToStringAttributes(
- final MoreObjects.ToStringHelper toStringHelper) {
- return toStringHelper;
- }
-
- @Override
- public InputStream openStream() {
- return new ByteArrayInputStream(source.getBytes(StandardCharsets.UTF_8));
- }
- };
+ final YangTextSchemaSource tempYangTextSchemaSource =
+ YangTextSchemaSourceSetBuilder.getYangTextSchemaSource(source, revisionSourceIdentifier);
try {
final YangModelDependencyInfo yangModelDependencyInfo
= YangModelDependencyInfo.forYangText(tempYangTextSchemaSource);
@Override
@Transactional
- public List<Long> findAnchorIdsForPagination(final DataspaceEntity dataspaceEntity, final CpsPathQuery cpsPathQuery,
+ public List<Long> findAnchorIdsForPagination(final DataspaceEntity dataspaceEntity,
+ final CpsPathQuery cpsPathQuery,
final PaginationOption paginationOption) {
final Query query = fragmentQueryBuilder.getQueryForAnchorIdsForPagination(
dataspaceEntity, cpsPathQuery, paginationOption);
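// Accepted textual options: empty, "all", "none", "direct", "-1", or a non-negative number of levels.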
Pattern.compile("^$|^all$|^none$|^direct$|^[0-9]+$|^-1$|^1$");
/**
- * Get depth.
- *
- * @return depth: -1 for all descendants, 0 for no descendants, or positive value for fixed level of descendants
+ * depth : -1 for all descendants, 0 for no descendants, or positive value for fixed level of descendants.
*/
@Getter
private final int depth;
public void sendCloudEvent(final String topicName, final String eventKey, final CloudEvent event) {
final CompletableFuture<SendResult<String, CloudEvent>> eventFuture =
cloudEventKafkaTemplate.send(topicName, eventKey, event);
- eventFuture.whenComplete((result, e) -> {
- if (e == null) {
- log.debug("Successfully sent event to topic : {} , Event : {}", result.getRecordMetadata().topic(),
- result.getProducerRecord().value());
-
- } else {
- log.error("Unable to send event to topic : {} due to {}", topicName, e.getMessage());
- }
- });
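+ // The send is asynchronous; log the outcome once the returned future completes.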
+ eventFuture.whenComplete((result, e) -> logOutcome(topicName, result, e));
}
/**
* Publish an event to the given Kafka topic with explicit Kafka headers.
*
* @param topicName target topic name
* @param eventKey message key
* @param eventHeaders event headers
* @param event message payload
*/
public void sendEvent(final String topicName, final String eventKey, final Headers eventHeaders, final T event) {
-
final ProducerRecord<String, T> producerRecord =
new ProducerRecord<>(topicName, null, eventKey, event, eventHeaders);
final CompletableFuture<SendResult<String, T>> eventFuture = legacyKafkaEventTemplate.send(producerRecord);
*/
public void sendEvent(final String topicName, final String eventKey, final Map<String, Object> eventHeaders,
final T event) {
-
sendEvent(topicName, eventKey, convertToKafkaHeaders(eventHeaders), event);
}
private void handleLegacyEventCallback(final String topicName,
- final CompletableFuture<SendResult<String, T>> eventFuture) {
- eventFuture.whenComplete((result, e) -> {
- if (e == null) {
- log.debug("Successfully sent event to topic : {} , Event : {}", result.getRecordMetadata().topic(),
- result.getProducerRecord().value());
- } else {
- log.error("Unable to send event to topic : {} due to {}", topicName, e.getMessage());
- }
- });
+ final CompletableFuture<SendResult<String, T>> eventFuture) {
+ eventFuture.whenComplete((result, e) -> logOutcome(topicName, result, e));
}
private Headers convertToKafkaHeaders(final Map<String, Object> eventMessageHeaders) {
return eventHeaders;
}
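+ /**
+ * Shared completion callback for both publishers: logs success at debug level and failures at error level.
+ */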
+ private static void logOutcome(final String topicName, final SendResult<String, ?> result, final Throwable e) {
+ if (e == null) {
+ final Object event = result.getProducerRecord().value();
+ log.debug("Successfully sent event to topic : {} , Event : {}", topicName, event);
+ } else {
+ log.error("Unable to send event to topic : {} due to {}", topicName, e.getMessage());
+ }
+ }
+
}
private String getFileContentAsString(final String fileName) {
try (final InputStream inputStream = getClass().getClassLoader().getResourceAsStream(fileName)) {
- return new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
+ if (inputStream == null) {
+ // A missing classpath resource yields a null stream; fail fast with a clear message instead of an NPE.
+ throw new IllegalStateException("Unable to find file on classpath: " + fileName);
+ }
+ return new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
} catch (final Exception exception) {
final String message = String.format("Onboarding failed as unable to read file: %s", fileName);
log.debug(message);
objectMapper.getTypeFactory().constructCollectionType(List.class, collectionEntryType);
return objectMapper.readValue(jsonContent, collectionType);
} catch (final JsonProcessingException e) {
- log.error("Parsing error occurred while converting JSON content to specific class type.");
+ log.error("Parsing error occurred while converting JSON content to json array.");
throw new DataValidationException("Parsing error occurred while converting "
+ "JSON content to specific class type.", e.getMessage());
}
generateSchemaContext(yangResourceNameToContent);
}
+ /**
+ * Create a new YangTextSchemaSource.
+ *
+ * @param source YANG (module) source as a string
+ * @param revisionSourceIdentifier identifier (source name and revision) of the source
+ * @return a YangTextSchemaSource created from the given source
+ */
+ public static YangTextSchemaSource getYangTextSchemaSource(final String source,
+ final RevisionSourceIdentifier revisionSourceIdentifier) {
+ return new YangTextSchemaSource(revisionSourceIdentifier) {
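+ // Anonymous source backed by the in-memory YANG string; no symbolic name or extra toString attributes are needed.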
+ @Override
+ public Optional<String> getSymbolicName() {
+ return Optional.empty();
+ }
+
+ @Override
+ protected MoreObjects.ToStringHelper addToStringAttributes(
+ final MoreObjects.ToStringHelper toStringHelper) {
+ return toStringHelper;
+ }
+
+ @Override
+ public InputStream openStream() {
+ return new ByteArrayInputStream(source.getBytes(StandardCharsets.UTF_8));
+ }
+ };
+ }
+
private record YangTextSchemaSourceSetImpl(SchemaContext schemaContext) implements YangTextSchemaSourceSet {
@Override
final RevisionSourceIdentifier revisionSourceIdentifier =
createIdentifierFromSourceName(checkNotNull(sourceName));
- return new YangTextSchemaSource(revisionSourceIdentifier) {
- @Override
- public Optional<String> getSymbolicName() {
- return Optional.empty();
- }
-
- @Override
- protected MoreObjects.ToStringHelper addToStringAttributes(
- final MoreObjects.ToStringHelper toStringHelper) {
- return toStringHelper;
- }
-
- @Override
- public InputStream openStream() {
- return new ByteArrayInputStream(source.getBytes(StandardCharsets.UTF_8));
- }
- };
+ return getYangTextSchemaSource(source, revisionSourceIdentifier);
}
private static RevisionSourceIdentifier createIdentifierFromSourceName(final String sourceName) {
private final Sleeper sleeper;
private static final Pattern ERROR_CODE_PATTERN = Pattern.compile("(\\d{3})");
private int decisionCounter = 0;
- @SuppressWarnings("CanBeFinal") // Do NOT change below to final as it needs to be set during test
+ @SuppressWarnings({"CanBeFinal", "FieldCanBeLocal"})
+ // Do NOT change below to final as it needs to be set during test
private static int slowResponseTimeInSeconds = 40;
@Override