Reformat files in testing

This commit is contained in:
Tommy Lillehagen 2017-10-06 15:32:23 +01:00
parent f98942d7ad
commit fcc4bdae7e
47 changed files with 1327 additions and 1269 deletions

View File

@@ -13,7 +13,8 @@ import java.util.concurrent.CompletableFuture.supplyAsync
class AttachmentDemoTest {
// run with a 10,000,000 bytes in-memory zip file. In practice, a slightly bigger file will be used (~10,002,000 bytes).
@Test fun `attachment demo using a 10MB zip file`() {
@Test
fun `attachment demo using a 10MB zip file`() {
val numOfExpectedBytes = 10_000_000
driver(isDebug = true, portAllocation = PortAllocation.Incremental(20000)) {
val demoUser = listOf(User("demo", "demo", setOf(startFlowPermission<AttachmentDemoFlow>())))

View File

@@ -141,11 +141,12 @@ class FloatingRatePaymentEvent(date: LocalDate,
val CSVHeader = RatePaymentEvent.CSVHeader + ",FixingDate"
}
override val flow: Amount<Currency> get() {
// TODO: Should an uncalculated amount return a zero ? null ? etc.
val v = rate.ratioUnit?.value ?: return Amount(0, notional.token)
return Amount(dayCountFactor.times(BigDecimal(notional.quantity)).times(v).toLong(), notional.token)
}
override val flow: Amount<Currency>
get() {
// TODO: Should an uncalculated amount return a zero ? null ? etc.
val v = rate.ratioUnit?.value ?: return Amount(0, notional.token)
return Amount(dayCountFactor.times(BigDecimal(notional.quantity)).times(v).toLong(), notional.token)
}
override fun toString(): String = "FloatingPaymentEvent $accrualStartDate -> $accrualEndDate : $dayCountFactor : $days : $date : $notional : $rate (fix on $fixingDate): $flow"

View File

@@ -242,7 +242,7 @@ class NodeInterestRatesTest : TestDependencyInjectionBase() {
}
private fun makePartialTX() = TransactionBuilder(DUMMY_NOTARY).withItems(
TransactionState(1000.DOLLARS.CASH `issued by` DUMMY_CASH_ISSUER `owned by` ALICE, Cash.PROGRAM_ID, DUMMY_NOTARY))
TransactionState(1000.DOLLARS.CASH `issued by` DUMMY_CASH_ISSUER `owned by` ALICE, Cash.PROGRAM_ID, DUMMY_NOTARY))
private fun makeFullTx() = makePartialTX().withItems(dummyCommand())
}

View File

@@ -86,7 +86,7 @@ class VisualiserViewModel {
// top right: 33.0469,64.3209
try {
return node.place.coordinate.project(view.mapImage.fitWidth, view.mapImage.fitHeight, 64.3209, 29.8406, -23.2031, 33.0469)
} catch(e: Exception) {
} catch (e: Exception) {
throw Exception("Cannot project ${node.started!!.info.chooseIdentity()}", e)
}
}

View File

@@ -141,6 +141,7 @@ class IRSSimulation(networkSendManuallyPumped: Boolean, runAsync: Boolean, laten
node2.internals.registerInitiatedFlow(FixingFlow.Fixer::class.java)
val notaryId = node1.rpcOps.notaryIdentities().first()
@InitiatingFlow
class StartDealFlow(val otherParty: Party,
val payload: AutoOffer) : FlowLogic<SignedTransaction>() {

View File

@@ -162,8 +162,10 @@ abstract class Simulation(val networkSendManuallyPumped: Boolean,
// These are used from the network visualiser tool.
private val _allFlowSteps = PublishSubject.create<Pair<SimulatedNode, ProgressTracker.Change>>()
private val _doneSteps = PublishSubject.create<Collection<SimulatedNode>>()
@Suppress("unused") val allFlowSteps: Observable<Pair<SimulatedNode, ProgressTracker.Change>> = _allFlowSteps
@Suppress("unused") val doneSteps: Observable<Collection<SimulatedNode>> = _doneSteps
@Suppress("unused")
val allFlowSteps: Observable<Pair<SimulatedNode, ProgressTracker.Change>> = _allFlowSteps
@Suppress("unused")
val doneSteps: Observable<Collection<SimulatedNode>> = _doneSteps
private var pumpCursor = 0

View File

@@ -21,7 +21,8 @@ class IRSSimulationTest {
unsetCordappPackages()
}
@Test fun `runs to completion`() {
@Test
fun `runs to completion`() {
LogHelper.setLevel("+messages") // FIXME: Don't manipulate static state in tests.
val sim = IRSSimulation(false, false, null)
val future = sim.start()

View File

@@ -31,7 +31,7 @@ private class NotaryDemoClientApi(val rpc: CordaRPCOps) {
private val counterparty by lazy {
val parties = rpc.networkMapSnapshot()
parties.fold(ArrayList<PartyAndCertificate>()) { acc, elem ->
acc.addAll(elem.legalIdentitiesAndCerts.filter { it.name == BOB.name})
acc.addAll(elem.legalIdentitiesAndCerts.filter { it.name == BOB.name })
acc
}.single().party
}

View File

@@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata;
@@ -19,46 +19,46 @@ import java.util.stream.Collectors;
*/
public class DirectoryMarketDataBuilder extends ExampleMarketDataBuilder {
/**
* The path to the root of the directory structure.
*/
private final Path rootPath;
/**
* The path to the root of the directory structure.
*/
private final Path rootPath;
/**
* Constructs an instance.
*
* @param rootPath the path to the root of the directory structure
*/
public DirectoryMarketDataBuilder(Path rootPath) {
this.rootPath = rootPath;
}
//-------------------------------------------------------------------------
@Override
protected Collection<ResourceLocator> getAllResources(String subdirectoryName) {
File dir = rootPath.resolve(subdirectoryName).toFile();
if (!dir.exists()) {
throw new IllegalArgumentException(Messages.format("Directory does not exist: {}", dir));
/**
* Constructs an instance.
*
* @param rootPath the path to the root of the directory structure
*/
public DirectoryMarketDataBuilder(Path rootPath) {
this.rootPath = rootPath;
}
return Arrays.stream(dir.listFiles())
.filter(f -> !f.isHidden())
.map(ResourceLocator::ofFile)
.collect(Collectors.toList());
}
@Override
protected ResourceLocator getResource(String subdirectoryName, String resourceName) {
File file = rootPath.resolve(subdirectoryName).resolve(resourceName).toFile();
if (!file.exists()) {
return null;
//-------------------------------------------------------------------------
@Override
protected Collection<ResourceLocator> getAllResources(String subdirectoryName) {
File dir = rootPath.resolve(subdirectoryName).toFile();
if (!dir.exists()) {
throw new IllegalArgumentException(Messages.format("Directory does not exist: {}", dir));
}
return Arrays.stream(dir.listFiles())
.filter(f -> !f.isHidden())
.map(ResourceLocator::ofFile)
.collect(Collectors.toList());
}
return ResourceLocator.ofFile(file);
}
@Override
protected boolean subdirectoryExists(String subdirectoryName) {
File file = rootPath.resolve(subdirectoryName).toFile();
return file.exists();
}
@Override
protected ResourceLocator getResource(String subdirectoryName, String resourceName) {
File file = rootPath.resolve(subdirectoryName).resolve(resourceName).toFile();
if (!file.exists()) {
return null;
}
return ResourceLocator.ofFile(file);
}
@Override
protected boolean subdirectoryExists(String subdirectoryName) {
File file = rootPath.resolve(subdirectoryName).toFile();
return file.exists();
}
}

View File

@@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata;
@@ -18,40 +18,41 @@ import java.util.Locale;
*/
public final class ExampleData {
/**
* Restricted constructor.
*/
private ExampleData() {
}
//-------------------------------------------------------------------------
/**
* Loads a golden copy of expected results from a text file.
*
* @param name the name of the results
* @return the loaded results
*/
public static String loadExpectedResults(String name) {
String classpathResourceName = String.format(Locale.ENGLISH, "classpath:goldencopy/%s.txt", name);
ResourceLocator resourceLocator = ResourceLocator.of(classpathResourceName);
try {
return resourceLocator.getCharSource().read().trim();
} catch (IOException ex) {
throw new UncheckedIOException(name, ex);
/**
* Restricted constructor.
*/
private ExampleData() {
}
}
/**
* Loads a trade report template from the standard INI format.
*
* @param templateName the name of the template
* @return the loaded report template
*/
public static TradeReportTemplate loadTradeReportTemplate(String templateName) {
String resourceName = String.format(Locale.ENGLISH, "classpath:example-reports/%s.ini", templateName);
ResourceLocator resourceLocator = ResourceLocator.of(resourceName);
IniFile ini = IniFile.of(resourceLocator.getCharSource());
return TradeReportTemplate.load(ini);
}
//-------------------------------------------------------------------------
/**
* Loads a golden copy of expected results from a text file.
*
* @param name the name of the results
* @return the loaded results
*/
public static String loadExpectedResults(String name) {
String classpathResourceName = String.format(Locale.ENGLISH, "classpath:goldencopy/%s.txt", name);
ResourceLocator resourceLocator = ResourceLocator.of(classpathResourceName);
try {
return resourceLocator.getCharSource().read().trim();
} catch (IOException ex) {
throw new UncheckedIOException(name, ex);
}
}
/**
* Loads a trade report template from the standard INI format.
*
* @param templateName the name of the template
* @return the loaded report template
*/
public static TradeReportTemplate loadTradeReportTemplate(String templateName) {
String resourceName = String.format(Locale.ENGLISH, "classpath:example-reports/%s.ini", templateName);
ResourceLocator resourceLocator = ResourceLocator.of(resourceName);
IniFile ini = IniFile.of(resourceLocator.getCharSource());
return TradeReportTemplate.load(ini);
}
}

View File

@@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata;
@@ -10,25 +10,26 @@ package com.opengamma.strata.examples.marketdata;
*/
public final class ExampleMarketData {
/**
* Root resource directory of the built-in example market data
*/
private static final String EXAMPLE_MARKET_DATA_ROOT = "example-marketdata";
/**
* Root resource directory of the built-in example market data
*/
private static final String EXAMPLE_MARKET_DATA_ROOT = "example-marketdata";
/**
* Restricted constructor.
*/
private ExampleMarketData() {
}
/**
* Restricted constructor.
*/
private ExampleMarketData() {
}
//-------------------------------------------------------------------------
/**
* Gets a market data builder for the built-in example market data.
*
* @return the market data builder
*/
public static ExampleMarketDataBuilder builder() {
return ExampleMarketDataBuilder.ofResource(EXAMPLE_MARKET_DATA_ROOT);
}
//-------------------------------------------------------------------------
/**
* Gets a market data builder for the built-in example market data.
*
* @return the market data builder
*/
public static ExampleMarketDataBuilder builder() {
return ExampleMarketDataBuilder.ofResource(EXAMPLE_MARKET_DATA_ROOT);
}
}

View File

@@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata;
@@ -67,355 +67,358 @@ import static com.opengamma.strata.collect.Guavate.toImmutableList;
*/
public abstract class ExampleMarketDataBuilder {
private static final Logger log = LoggerFactory.getLogger(ExampleMarketDataBuilder.class);
private static final Logger log = LoggerFactory.getLogger(ExampleMarketDataBuilder.class);
/** The name of the subdirectory containing historical fixings. */
private static final String HISTORICAL_FIXINGS_DIR = "historical-fixings";
/** The name of the subdirectory containing historical fixings. */
private static final String HISTORICAL_FIXINGS_DIR = "historical-fixings";
/** The name of the subdirectory containing calibrated rates curves. */
private static final String CURVES_DIR = "curves";
/** The name of the curve groups file. */
private static final String CURVES_GROUPS_FILE = "groups.csv";
/** The name of the curve settings file. */
private static final String CURVES_SETTINGS_FILE = "settings.csv";
/** The name of the subdirectory containing calibrated rates curves. */
private static final String CURVES_DIR = "curves";
/** The name of the curve groups file. */
private static final String CURVES_GROUPS_FILE = "groups.csv";
/** The name of the curve settings file. */
private static final String CURVES_SETTINGS_FILE = "settings.csv";
/** The name of the directory containing CDS ISDA yield curve, credit curve and static data. */
private static final String CREDIT_DIR = "credit";
private static final String CDS_YIELD_CURVES_FILE = "cds.yieldCurves.csv";
private static final String SINGLE_NAME_CREDIT_CURVES_FILE = "singleName.creditCurves.csv";
private static final String SINGLE_NAME_STATIC_DATA_FILE = "singleName.staticData.csv";
private static final String INDEX_CREDIT_CURVES_FILE = "index.creditCurves.csv";
private static final String INDEX_STATIC_DATA_FILE = "index.staticData.csv";
/** The name of the directory containing CDS ISDA yield curve, credit curve and static data. */
private static final String CREDIT_DIR = "credit";
private static final String CDS_YIELD_CURVES_FILE = "cds.yieldCurves.csv";
private static final String SINGLE_NAME_CREDIT_CURVES_FILE = "singleName.creditCurves.csv";
private static final String SINGLE_NAME_STATIC_DATA_FILE = "singleName.staticData.csv";
private static final String INDEX_CREDIT_CURVES_FILE = "index.creditCurves.csv";
private static final String INDEX_STATIC_DATA_FILE = "index.staticData.csv";
/** The name of the subdirectory containing simple market quotes. */
private static final String QUOTES_DIR = "quotes";
/** The name of the quotes file. */
private static final String QUOTES_FILE = "quotes.csv";
/** The name of the subdirectory containing simple market quotes. */
private static final String QUOTES_DIR = "quotes";
/** The name of the quotes file. */
private static final String QUOTES_FILE = "quotes.csv";
//-------------------------------------------------------------------------
/**
* Creates an instance from a given classpath resource root location using the class loader
* which created this class.
* <p>
* This is designed to handle resource roots which may physically correspond to a directory on
* disk, or be located within a jar file.
*
* @param resourceRoot the resource root path
* @return the market data builder
*/
public static ExampleMarketDataBuilder ofResource(String resourceRoot) {
return ofResource(resourceRoot, ExampleMarketDataBuilder.class.getClassLoader());
}
//-------------------------------------------------------------------------
/**
* Creates an instance from a given classpath resource root location, using the given class loader
* to find the resource.
* <p>
* This is designed to handle resource roots which may physically correspond to a directory on
* disk, or be located within a jar file.
*
* @param resourceRoot the resource root path
* @param classLoader the class loader with which to find the resource
* @return the market data builder
*/
public static ExampleMarketDataBuilder ofResource(String resourceRoot, ClassLoader classLoader) {
// classpath resources are forward-slash separated
String qualifiedRoot = resourceRoot;
qualifiedRoot = qualifiedRoot.startsWith("/") ? qualifiedRoot.substring(1) : qualifiedRoot;
qualifiedRoot = qualifiedRoot.startsWith("\\") ? qualifiedRoot.substring(1) : qualifiedRoot;
qualifiedRoot = qualifiedRoot.endsWith("/") ? qualifiedRoot : qualifiedRoot + "/";
URL url = classLoader.getResource(qualifiedRoot);
if (url == null) {
throw new IllegalArgumentException(Messages.format("Classpath resource not found: {}", qualifiedRoot));
}
if (url.getProtocol() != null && "jar".equals(url.getProtocol().toLowerCase(Locale.ENGLISH))) {
// Inside a JAR
int classSeparatorIdx = url.getFile().indexOf("!");
if (classSeparatorIdx == -1) {
throw new IllegalArgumentException(Messages.format("Unexpected JAR file URL: {}", url));
}
String jarPath = url.getFile().substring("file:".length(), classSeparatorIdx);
File jarFile;
try {
jarFile = new File(jarPath);
} catch (Exception e) {
throw new IllegalArgumentException(Messages.format("Unable to create file for JAR: {}", jarPath), e);
}
return new JarMarketDataBuilder(jarFile, resourceRoot);
} else {
// Resource is on disk
File file;
try {
file = new File(url.toURI());
} catch (URISyntaxException e) {
throw new IllegalArgumentException(Messages.format("Unexpected file location: {}", url), e);
}
return new DirectoryMarketDataBuilder(file.toPath());
}
}
/**
* Creates an instance from a given directory root.
*
* @param rootPath the root directory
* @return the market data builder
*/
public static ExampleMarketDataBuilder ofPath(Path rootPath) {
return new DirectoryMarketDataBuilder(rootPath);
}
//-------------------------------------------------------------------------
/**
* Builds a market data snapshot from this environment.
*
* @param marketDataDate the date of the market data
* @return the snapshot
*/
public ImmutableMarketData buildSnapshot(LocalDate marketDataDate) {
ImmutableMarketDataBuilder builder = ImmutableMarketData.builder(marketDataDate);
loadFixingSeries(builder);
loadRatesCurves(builder, marketDataDate);
loadQuotes(builder, marketDataDate);
loadFxRates(builder);
loadCreditMarketData(builder, marketDataDate);
return builder.build();
}
/**
* Gets the rates market lookup to use with this environment.
*
* @param marketDataDate the date of the market data
* @return the rates lookup
*/
public RatesMarketDataLookup ratesLookup(LocalDate marketDataDate) {
SortedMap<LocalDate, CurveGroup> curves = loadAllRatesCurves();
return RatesMarketDataLookup.of(curves.get(marketDataDate));
}
/**
* Gets all rates curves.
*
* @return the map of all rates curves
*/
public SortedMap<LocalDate, CurveGroup> loadAllRatesCurves() {
if (!subdirectoryExists(CURVES_DIR)) {
throw new IllegalArgumentException("No rates curves directory found");
}
ResourceLocator curveGroupsResource = getResource(CURVES_DIR, CURVES_GROUPS_FILE);
if (curveGroupsResource == null) {
throw new IllegalArgumentException(Messages.format(
"Unable to load rates curves: curve groups file not found at {}/{}", CURVES_DIR, CURVES_GROUPS_FILE));
}
ResourceLocator curveSettingsResource = getResource(CURVES_DIR, CURVES_SETTINGS_FILE);
if (curveSettingsResource == null) {
throw new IllegalArgumentException(Messages.format(
"Unable to load rates curves: curve settings file not found at {}/{}", CURVES_DIR, CURVES_SETTINGS_FILE));
}
ListMultimap<LocalDate, CurveGroup> curveGroups =
RatesCurvesCsvLoader.loadAllDates(curveGroupsResource, curveSettingsResource, getRatesCurvesResources());
// There is only one curve group in the market data file so this will always succeed
Map<LocalDate, CurveGroup> curveGroupMap = Maps.transformValues(curveGroups.asMap(), groups -> groups.iterator().next());
return new TreeMap<>(curveGroupMap);
}
//-------------------------------------------------------------------------
private void loadFixingSeries(ImmutableMarketDataBuilder builder) {
if (!subdirectoryExists(HISTORICAL_FIXINGS_DIR)) {
log.debug("No historical fixings directory found");
return;
}
try {
Collection<ResourceLocator> fixingSeriesResources = getAllResources(HISTORICAL_FIXINGS_DIR);
Map<ObservableId, LocalDateDoubleTimeSeries> fixingSeries = FixingSeriesCsvLoader.load(fixingSeriesResources);
builder.addTimeSeriesMap(fixingSeries);
} catch (Exception e) {
log.error("Error loading fixing series", e);
}
}
private void loadRatesCurves(ImmutableMarketDataBuilder builder, LocalDate marketDataDate) {
if (!subdirectoryExists(CURVES_DIR)) {
log.debug("No rates curves directory found");
return;
/**
* Creates an instance from a given classpath resource root location using the class loader
* which created this class.
* <p>
* This is designed to handle resource roots which may physically correspond to a directory on
* disk, or be located within a jar file.
*
* @param resourceRoot the resource root path
* @return the market data builder
*/
public static ExampleMarketDataBuilder ofResource(String resourceRoot) {
return ofResource(resourceRoot, ExampleMarketDataBuilder.class.getClassLoader());
}
ResourceLocator curveGroupsResource = getResource(CURVES_DIR, CURVES_GROUPS_FILE);
if (curveGroupsResource == null) {
log.error("Unable to load rates curves: curve groups file not found at {}/{}", CURVES_DIR, CURVES_GROUPS_FILE);
return;
/**
* Creates an instance from a given classpath resource root location, using the given class loader
* to find the resource.
* <p>
* This is designed to handle resource roots which may physically correspond to a directory on
* disk, or be located within a jar file.
*
* @param resourceRoot the resource root path
* @param classLoader the class loader with which to find the resource
* @return the market data builder
*/
public static ExampleMarketDataBuilder ofResource(String resourceRoot, ClassLoader classLoader) {
// classpath resources are forward-slash separated
String qualifiedRoot = resourceRoot;
qualifiedRoot = qualifiedRoot.startsWith("/") ? qualifiedRoot.substring(1) : qualifiedRoot;
qualifiedRoot = qualifiedRoot.startsWith("\\") ? qualifiedRoot.substring(1) : qualifiedRoot;
qualifiedRoot = qualifiedRoot.endsWith("/") ? qualifiedRoot : qualifiedRoot + "/";
URL url = classLoader.getResource(qualifiedRoot);
if (url == null) {
throw new IllegalArgumentException(Messages.format("Classpath resource not found: {}", qualifiedRoot));
}
if (url.getProtocol() != null && "jar".equals(url.getProtocol().toLowerCase(Locale.ENGLISH))) {
// Inside a JAR
int classSeparatorIdx = url.getFile().indexOf("!");
if (classSeparatorIdx == -1) {
throw new IllegalArgumentException(Messages.format("Unexpected JAR file URL: {}", url));
}
String jarPath = url.getFile().substring("file:".length(), classSeparatorIdx);
File jarFile;
try {
jarFile = new File(jarPath);
} catch (Exception e) {
throw new IllegalArgumentException(Messages.format("Unable to create file for JAR: {}", jarPath), e);
}
return new JarMarketDataBuilder(jarFile, resourceRoot);
} else {
// Resource is on disk
File file;
try {
file = new File(url.toURI());
} catch (URISyntaxException e) {
throw new IllegalArgumentException(Messages.format("Unexpected file location: {}", url), e);
}
return new DirectoryMarketDataBuilder(file.toPath());
}
}
ResourceLocator curveSettingsResource = getResource(CURVES_DIR, CURVES_SETTINGS_FILE);
if (curveSettingsResource == null) {
log.error("Unable to load rates curves: curve settings file not found at {}/{}", CURVES_DIR, CURVES_SETTINGS_FILE);
return;
}
try {
Collection<ResourceLocator> curvesResources = getRatesCurvesResources();
List<CurveGroup> ratesCurves =
RatesCurvesCsvLoader.load(marketDataDate, curveGroupsResource, curveSettingsResource, curvesResources);
for (CurveGroup group : ratesCurves) {
// add entry for higher level discount curve name
group.getDiscountCurves().forEach(
(ccy, curve) -> builder.addValue(CurveId.of(group.getName(), curve.getName()), curve));
// add entry for higher level forward curve name
group.getForwardCurves().forEach(
(idx, curve) -> builder.addValue(CurveId.of(group.getName(), curve.getName()), curve));
}
} catch (Exception e) {
log.error("Error loading rates curves", e);
}
}
// load quotes
private void loadQuotes(ImmutableMarketDataBuilder builder, LocalDate marketDataDate) {
if (!subdirectoryExists(QUOTES_DIR)) {
log.debug("No quotes directory found");
return;
/**
* Creates an instance from a given directory root.
*
* @param rootPath the root directory
* @return the market data builder
*/
public static ExampleMarketDataBuilder ofPath(Path rootPath) {
return new DirectoryMarketDataBuilder(rootPath);
}
ResourceLocator quotesResource = getResource(QUOTES_DIR, QUOTES_FILE);
if (quotesResource == null) {
log.error("Unable to load quotes: quotes file not found at {}/{}", QUOTES_DIR, QUOTES_FILE);
return;
//-------------------------------------------------------------------------
/**
* Builds a market data snapshot from this environment.
*
* @param marketDataDate the date of the market data
* @return the snapshot
*/
public ImmutableMarketData buildSnapshot(LocalDate marketDataDate) {
ImmutableMarketDataBuilder builder = ImmutableMarketData.builder(marketDataDate);
loadFixingSeries(builder);
loadRatesCurves(builder, marketDataDate);
loadQuotes(builder, marketDataDate);
loadFxRates(builder);
loadCreditMarketData(builder, marketDataDate);
return builder.build();
}
try {
Map<QuoteId, Double> quotes = QuotesCsvLoader.load(marketDataDate, quotesResource);
builder.addValueMap(quotes);
} catch (Exception ex) {
log.error("Error loading quotes", ex);
}
}
private void loadFxRates(ImmutableMarketDataBuilder builder) {
// TODO - load from CSV file - format to be defined
builder.addValue(FxRateId.of(Currency.GBP, Currency.USD), FxRate.of(Currency.GBP, Currency.USD, 1.61));
}
//-------------------------------------------------------------------------
private Collection<ResourceLocator> getRatesCurvesResources() {
return getAllResources(CURVES_DIR).stream()
.filter(res -> !res.getLocator().endsWith(CURVES_GROUPS_FILE))
.filter(res -> !res.getLocator().endsWith(CURVES_SETTINGS_FILE))
.collect(toImmutableList());
}
private void loadCreditMarketData(ImmutableMarketDataBuilder builder, LocalDate marketDataDate) {
if (!subdirectoryExists(CREDIT_DIR)) {
log.debug("No credit curves directory found");
return;
/**
* Gets the rates market lookup to use with this environment.
*
* @param marketDataDate the date of the market data
* @return the rates lookup
*/
public RatesMarketDataLookup ratesLookup(LocalDate marketDataDate) {
SortedMap<LocalDate, CurveGroup> curves = loadAllRatesCurves();
return RatesMarketDataLookup.of(curves.get(marketDataDate));
}
String creditMarketDataDateDirectory = String.format(
Locale.ENGLISH,
"%s/%s",
CREDIT_DIR,
marketDataDate.format(DateTimeFormatter.ISO_LOCAL_DATE));
/**
* Gets all rates curves.
*
* @return the map of all rates curves
*/
public SortedMap<LocalDate, CurveGroup> loadAllRatesCurves() {
if (!subdirectoryExists(CURVES_DIR)) {
throw new IllegalArgumentException("No rates curves directory found");
}
ResourceLocator curveGroupsResource = getResource(CURVES_DIR, CURVES_GROUPS_FILE);
if (curveGroupsResource == null) {
throw new IllegalArgumentException(Messages.format(
"Unable to load rates curves: curve groups file not found at {}/{}", CURVES_DIR, CURVES_GROUPS_FILE));
}
ResourceLocator curveSettingsResource = getResource(CURVES_DIR, CURVES_SETTINGS_FILE);
if (curveSettingsResource == null) {
throw new IllegalArgumentException(Messages.format(
"Unable to load rates curves: curve settings file not found at {}/{}", CURVES_DIR, CURVES_SETTINGS_FILE));
}
ListMultimap<LocalDate, CurveGroup> curveGroups =
RatesCurvesCsvLoader.loadAllDates(curveGroupsResource, curveSettingsResource, getRatesCurvesResources());
if (!subdirectoryExists(creditMarketDataDateDirectory)) {
log.debug("Unable to load market data: directory not found at {}", creditMarketDataDateDirectory);
return;
// There is only one curve group in the market data file so this will always succeed
Map<LocalDate, CurveGroup> curveGroupMap = Maps.transformValues(curveGroups.asMap(), groups -> groups.iterator().next());
return new TreeMap<>(curveGroupMap);
}
loadCdsYieldCurves(builder, creditMarketDataDateDirectory);
loadCdsSingleNameSpreadCurves(builder, creditMarketDataDateDirectory);
loadCdsIndexSpreadCurves(builder, creditMarketDataDateDirectory);
}
private void loadCdsYieldCurves(ImmutableMarketDataBuilder builder, String creditMarketDataDateDirectory) {
ResourceLocator cdsYieldCurvesResource = getResource(creditMarketDataDateDirectory, CDS_YIELD_CURVES_FILE);
if (cdsYieldCurvesResource == null) {
log.debug("Unable to load cds yield curves: file not found at {}/{}", creditMarketDataDateDirectory,
CDS_YIELD_CURVES_FILE);
return;
//-------------------------------------------------------------------------
private void loadFixingSeries(ImmutableMarketDataBuilder builder) {
if (!subdirectoryExists(HISTORICAL_FIXINGS_DIR)) {
log.debug("No historical fixings directory found");
return;
}
try {
Collection<ResourceLocator> fixingSeriesResources = getAllResources(HISTORICAL_FIXINGS_DIR);
Map<ObservableId, LocalDateDoubleTimeSeries> fixingSeries = FixingSeriesCsvLoader.load(fixingSeriesResources);
builder.addTimeSeriesMap(fixingSeries);
} catch (Exception e) {
log.error("Error loading fixing series", e);
}
}
CharSource inputSource = cdsYieldCurvesResource.getCharSource();
Map<IsdaYieldCurveInputsId, IsdaYieldCurveInputs> yieldCuves = MarkitYieldCurveDataParser.parse(inputSource);
private void loadRatesCurves(ImmutableMarketDataBuilder builder, LocalDate marketDataDate) {
if (!subdirectoryExists(CURVES_DIR)) {
log.debug("No rates curves directory found");
return;
}
for (IsdaYieldCurveInputsId id : yieldCuves.keySet()) {
IsdaYieldCurveInputs curveInputs = yieldCuves.get(id);
builder.addValue(id, curveInputs);
}
}
ResourceLocator curveGroupsResource = getResource(CURVES_DIR, CURVES_GROUPS_FILE);
if (curveGroupsResource == null) {
log.error("Unable to load rates curves: curve groups file not found at {}/{}", CURVES_DIR, CURVES_GROUPS_FILE);
return;
}
private void loadCdsSingleNameSpreadCurves(ImmutableMarketDataBuilder builder, String creditMarketDataDateDirectory) {
ResourceLocator singleNameCurvesResource = getResource(creditMarketDataDateDirectory, SINGLE_NAME_CREDIT_CURVES_FILE);
if (singleNameCurvesResource == null) {
log.debug("Unable to load single name spread curves: file not found at {}/{}", creditMarketDataDateDirectory,
SINGLE_NAME_CREDIT_CURVES_FILE);
return;
ResourceLocator curveSettingsResource = getResource(CURVES_DIR, CURVES_SETTINGS_FILE);
if (curveSettingsResource == null) {
log.error("Unable to load rates curves: curve settings file not found at {}/{}", CURVES_DIR, CURVES_SETTINGS_FILE);
return;
}
try {
Collection<ResourceLocator> curvesResources = getRatesCurvesResources();
List<CurveGroup> ratesCurves =
RatesCurvesCsvLoader.load(marketDataDate, curveGroupsResource, curveSettingsResource, curvesResources);
for (CurveGroup group : ratesCurves) {
// add entry for higher level discount curve name
group.getDiscountCurves().forEach(
(ccy, curve) -> builder.addValue(CurveId.of(group.getName(), curve.getName()), curve));
// add entry for higher level forward curve name
group.getForwardCurves().forEach(
(idx, curve) -> builder.addValue(CurveId.of(group.getName(), curve.getName()), curve));
}
} catch (Exception e) {
log.error("Error loading rates curves", e);
}
}
ResourceLocator singleNameStaticDataResource = getResource(creditMarketDataDateDirectory, SINGLE_NAME_STATIC_DATA_FILE);
if (singleNameStaticDataResource == null) {
log.debug("Unable to load single name static data: file not found at {}/{}", creditMarketDataDateDirectory,
SINGLE_NAME_STATIC_DATA_FILE);
return;
// load quotes
private void loadQuotes(ImmutableMarketDataBuilder builder, LocalDate marketDataDate) {
if (!subdirectoryExists(QUOTES_DIR)) {
log.debug("No quotes directory found");
return;
}
ResourceLocator quotesResource = getResource(QUOTES_DIR, QUOTES_FILE);
if (quotesResource == null) {
log.error("Unable to load quotes: quotes file not found at {}/{}", QUOTES_DIR, QUOTES_FILE);
return;
}
try {
Map<QuoteId, Double> quotes = QuotesCsvLoader.load(marketDataDate, quotesResource);
builder.addValueMap(quotes);
} catch (Exception ex) {
log.error("Error loading quotes", ex);
}
}
try {
CharSource inputCreditCurvesSource = singleNameCurvesResource.getCharSource();
CharSource inputStaticDataSource = singleNameStaticDataResource.getCharSource();
MarkitSingleNameCreditCurveDataParser.parse(builder, inputCreditCurvesSource, inputStaticDataSource);
} catch (Exception ex) {
throw new RuntimeException(String.format(
Locale.ENGLISH,
"Unable to read single name spread curves: exception at %s/%s",
creditMarketDataDateDirectory, SINGLE_NAME_CREDIT_CURVES_FILE), ex);
}
}
private void loadCdsIndexSpreadCurves(ImmutableMarketDataBuilder builder, String creditMarketDataDateDirectory) {
ResourceLocator inputCurvesResource = getResource(creditMarketDataDateDirectory, INDEX_CREDIT_CURVES_FILE);
if (inputCurvesResource == null) {
log.debug("Unable to load single name spread curves: file not found at {}/{}", creditMarketDataDateDirectory,
INDEX_CREDIT_CURVES_FILE);
return;
private void loadFxRates(ImmutableMarketDataBuilder builder) {
// TODO - load from CSV file - format to be defined
builder.addValue(FxRateId.of(Currency.GBP, Currency.USD), FxRate.of(Currency.GBP, Currency.USD, 1.61));
}
ResourceLocator inputStaticDataResource = getResource(creditMarketDataDateDirectory, INDEX_STATIC_DATA_FILE);
if (inputStaticDataResource == null) {
log.debug("Unable to load index static data: file not found at {}/{}", creditMarketDataDateDirectory,
INDEX_STATIC_DATA_FILE);
return;
//-------------------------------------------------------------------------
// Collects the rates curve resources, excluding the curve-groups and
// curve-settings configuration files which are loaded separately.
private Collection<ResourceLocator> getRatesCurvesResources() {
  return getAllResources(CURVES_DIR).stream()
      .filter(res -> {
        String locator = res.getLocator();
        return !(locator.endsWith(CURVES_GROUPS_FILE) || locator.endsWith(CURVES_SETTINGS_FILE));
      })
      .collect(toImmutableList());
}
CharSource indexCreditCurvesSource = inputCurvesResource.getCharSource();
CharSource indexStaticDataSource = inputStaticDataResource.getCharSource();
MarkitIndexCreditCurveDataParser.parse(builder, indexCreditCurvesSource, indexStaticDataSource);
private void loadCreditMarketData(ImmutableMarketDataBuilder builder, LocalDate marketDataDate) {
if (!subdirectoryExists(CREDIT_DIR)) {
log.debug("No credit curves directory found");
return;
}
}
String creditMarketDataDateDirectory = String.format(
Locale.ENGLISH,
"%s/%s",
CREDIT_DIR,
marketDataDate.format(DateTimeFormatter.ISO_LOCAL_DATE));
//-------------------------------------------------------------------------
/**
* Gets all available resources from a given subdirectory.
*
* @param subdirectoryName the name of the subdirectory
* @return a collection of locators for the resources in the subdirectory
*/
protected abstract Collection<ResourceLocator> getAllResources(String subdirectoryName);
if (!subdirectoryExists(creditMarketDataDateDirectory)) {
log.debug("Unable to load market data: directory not found at {}", creditMarketDataDateDirectory);
return;
}
/**
* Gets a specific resource from a given subdirectory.
*
* @param subdirectoryName the name of the subdirectory
* @param resourceName the name of the resource
* @return a locator for the requested resource
*/
protected abstract ResourceLocator getResource(String subdirectoryName, String resourceName);
loadCdsYieldCurves(builder, creditMarketDataDateDirectory);
loadCdsSingleNameSpreadCurves(builder, creditMarketDataDateDirectory);
loadCdsIndexSpreadCurves(builder, creditMarketDataDateDirectory);
}
/**
* Checks whether a specific subdirectory exists.
*
* @param subdirectoryName the name of the subdirectory
* @return whether the subdirectory exists
*/
protected abstract boolean subdirectoryExists(String subdirectoryName);
// Loads the ISDA yield curve inputs used for CDS pricing into the builder.
// A missing file is logged at debug level and silently skipped.
private void loadCdsYieldCurves(ImmutableMarketDataBuilder builder, String creditMarketDataDateDirectory) {
  ResourceLocator curvesResource = getResource(creditMarketDataDateDirectory, CDS_YIELD_CURVES_FILE);
  if (curvesResource == null) {
    log.debug("Unable to load cds yield curves: file not found at {}/{}", creditMarketDataDateDirectory,
        CDS_YIELD_CURVES_FILE);
    return;
  }
  Map<IsdaYieldCurveInputsId, IsdaYieldCurveInputs> parsedCurves =
      MarkitYieldCurveDataParser.parse(curvesResource.getCharSource());
  for (Map.Entry<IsdaYieldCurveInputsId, IsdaYieldCurveInputs> curveEntry : parsedCurves.entrySet()) {
    builder.addValue(curveEntry.getKey(), curveEntry.getValue());
  }
}
// Loads the single name CDS spread curves plus their static data into the builder.
// Both files must be present: if either is missing the load is skipped with a
// debug message, but a parse failure of present files is treated as fatal.
private void loadCdsSingleNameSpreadCurves(ImmutableMarketDataBuilder builder, String creditMarketDataDateDirectory) {
  ResourceLocator singleNameCurvesResource = getResource(creditMarketDataDateDirectory, SINGLE_NAME_CREDIT_CURVES_FILE);
  if (singleNameCurvesResource == null) {
    log.debug("Unable to load single name spread curves: file not found at {}/{}", creditMarketDataDateDirectory,
        SINGLE_NAME_CREDIT_CURVES_FILE);
    return;
  }
  ResourceLocator singleNameStaticDataResource = getResource(creditMarketDataDateDirectory, SINGLE_NAME_STATIC_DATA_FILE);
  if (singleNameStaticDataResource == null) {
    log.debug("Unable to load single name static data: file not found at {}/{}", creditMarketDataDateDirectory,
        SINGLE_NAME_STATIC_DATA_FILE);
    return;
  }
  try {
    CharSource inputCreditCurvesSource = singleNameCurvesResource.getCharSource();
    CharSource inputStaticDataSource = singleNameStaticDataResource.getCharSource();
    // note the asymmetry: missing files above are best-effort skips, but a
    // malformed file here aborts the whole market data load
    MarkitSingleNameCreditCurveDataParser.parse(builder, inputCreditCurvesSource, inputStaticDataSource);
  } catch (Exception ex) {
    throw new RuntimeException(String.format(
        Locale.ENGLISH,
        "Unable to read single name spread curves: exception at %s/%s",
        creditMarketDataDateDirectory, SINGLE_NAME_CREDIT_CURVES_FILE), ex);
  }
}
// Loads the CDS index spread curves plus their static data into the builder.
// Missing files are logged at debug level and skipped.
private void loadCdsIndexSpreadCurves(ImmutableMarketDataBuilder builder, String creditMarketDataDateDirectory) {
  ResourceLocator inputCurvesResource = getResource(creditMarketDataDateDirectory, INDEX_CREDIT_CURVES_FILE);
  if (inputCurvesResource == null) {
    // fixed copy-paste defect: this message previously said "single name spread
    // curves" although this method loads the index curves file
    log.debug("Unable to load index spread curves: file not found at {}/{}", creditMarketDataDateDirectory,
        INDEX_CREDIT_CURVES_FILE);
    return;
  }
  ResourceLocator inputStaticDataResource = getResource(creditMarketDataDateDirectory, INDEX_STATIC_DATA_FILE);
  if (inputStaticDataResource == null) {
    log.debug("Unable to load index static data: file not found at {}/{}", creditMarketDataDateDirectory,
        INDEX_STATIC_DATA_FILE);
    return;
  }
  CharSource indexCreditCurvesSource = inputCurvesResource.getCharSource();
  CharSource indexStaticDataSource = inputStaticDataResource.getCharSource();
  MarkitIndexCreditCurveDataParser.parse(builder, indexCreditCurvesSource, indexStaticDataSource);
}
//-------------------------------------------------------------------------
/**
* Gets all available resources from a given subdirectory.
*
* @param subdirectoryName the name of the subdirectory
* @return a collection of locators for the resources in the subdirectory
*/
protected abstract Collection<ResourceLocator> getAllResources(String subdirectoryName);
/**
* Gets a specific resource from a given subdirectory.
*
* @param subdirectoryName the name of the subdirectory
* @param resourceName the name of the resource
* @return a locator for the requested resource
*/
protected abstract ResourceLocator getResource(String subdirectoryName, String resourceName);
/**
* Checks whether a specific subdirectory exists.
*
* @param subdirectoryName the name of the subdirectory
* @return whether the subdirectory exists
*/
protected abstract boolean subdirectoryExists(String subdirectoryName);
}

View File

@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata;
@ -22,96 +22,96 @@ import java.util.stream.Collectors;
*/
public class JarMarketDataBuilder extends ExampleMarketDataBuilder {
/**
* The JAR file containing the expected structure of resources.
*/
private final File jarFile;
/**
* The root path to the resources within the JAR file.
*/
private final String rootPath;
/**
* A cache of JAR entries under the root path.
*/
private final ImmutableSet<String> entries;
/**
* The JAR file containing the expected structure of resources.
*/
private final File jarFile;
/**
* The root path to the resources within the JAR file.
*/
private final String rootPath;
/**
* A cache of JAR entries under the root path.
*/
private final ImmutableSet<String> entries;
/**
 * Constructs an instance.
 *
 * @param jarFile the JAR file containing the expected structure of resources
 * @param rootPath the root path to the resources within the JAR file
 */
public JarMarketDataBuilder(File jarFile, String rootPath) {
  // classpath resources are forward-slash separated
  String jarRoot = rootPath.startsWith("/") ? rootPath.substring(1) : rootPath;
  if (!jarRoot.endsWith("/")) {
    jarRoot += "/";
  }
  this.jarFile = jarFile;
  this.rootPath = jarRoot;
  // NOTE(review): the entry scan uses the raw rootPath, not the normalized
  // jarRoot computed above — presumably callers pass a path with no leading or
  // trailing slash; confirm this is intentional (getEntries strips
  // rootPath.length() + 1 characters, which assumes no trailing slash)
  this.entries = getEntries(jarFile, rootPath);
}
//-------------------------------------------------------------------------
@Override
protected Collection<ResourceLocator> getAllResources(String subdirectoryName) {
String resolvedSubdirectory = subdirectoryName + "/";
return entries.stream()
.filter(e -> e.startsWith(resolvedSubdirectory) && !e.equals(resolvedSubdirectory))
.map(e -> getEntryLocator(rootPath + e))
.collect(Collectors.toSet());
}
// Looks up a single named resource inside the JAR, returning null when the
// corresponding entry is absent.
@Override
protected ResourceLocator getResource(String subdirectoryName, String resourceName) {
  String entryPath = String.format(Locale.ENGLISH, "%s%s/%s", rootPath, subdirectoryName, resourceName);
  try (JarFile jar = new JarFile(jarFile)) {
    JarEntry jarEntry = jar.getJarEntry(entryPath);
    // absent entry is a normal outcome, not an error
    return jarEntry == null ? null : getEntryLocator(jarEntry.getName());
  } catch (Exception e) {
    throw new IllegalArgumentException(
        Messages.format("Error loading resource from JAR file: {}", jarFile), e);
  }
}
// Determines whether the given subdirectory exists among the cached JAR entries.
@Override
protected boolean subdirectoryExists(String subdirectoryName) {
  // classpath resources are forward-slash separated
  String candidate = subdirectoryName;
  if (candidate.startsWith("/")) {
    candidate = candidate.substring(1);
  }
  if (!candidate.endsWith("/")) {
    candidate = candidate + "/";
  }
  return entries.contains(candidate);
}
//-------------------------------------------------------------------------
// Gets the resource locator corresponding to a given entry
private ResourceLocator getEntryLocator(String entryName) {
return ResourceLocator.of(ResourceLocator.CLASSPATH_URL_PREFIX + entryName);
}
private static ImmutableSet<String> getEntries(File jarFile, String rootPath) {
ImmutableSet.Builder<String> builder = ImmutableSet.builder();
try (JarFile jar = new JarFile(jarFile)) {
Enumeration<JarEntry> jarEntries = jar.entries();
while (jarEntries.hasMoreElements()) {
JarEntry entry = jarEntries.nextElement();
String entryName = entry.getName();
if (entryName.startsWith(rootPath) && !entryName.equals(rootPath)) {
String relativeEntryPath = entryName.substring(rootPath.length() + 1);
if (!relativeEntryPath.trim().isEmpty()) {
builder.add(relativeEntryPath);
}
/**
* Constructs an instance.
*
* @param jarFile the JAR file containing the expected structure of resources
* @param rootPath the root path to the resources within the JAR file
*/
public JarMarketDataBuilder(File jarFile, String rootPath) {
// classpath resources are forward-slash separated
String jarRoot = rootPath.startsWith("/") ? rootPath.substring(1) : rootPath;
if (!jarRoot.endsWith("/")) {
jarRoot += "/";
}
}
} catch (Exception e) {
throw new IllegalArgumentException(
Messages.format("Error scanning entries in JAR file: {}", jarFile), e);
this.jarFile = jarFile;
this.rootPath = jarRoot;
this.entries = getEntries(jarFile, rootPath);
}
//-------------------------------------------------------------------------
// Streams the cached entries and keeps those strictly under the requested
// subdirectory (the directory entry itself is excluded).
@Override
protected Collection<ResourceLocator> getAllResources(String subdirectoryName) {
  String prefix = subdirectoryName + "/";
  return entries.stream()
      .filter(entry -> entry.startsWith(prefix))
      .filter(entry -> !entry.equals(prefix))
      .map(entry -> getEntryLocator(rootPath + entry))
      .collect(Collectors.toSet());
}
@Override
protected ResourceLocator getResource(String subdirectoryName, String resourceName) {
String fullLocation = String.format(Locale.ENGLISH, "%s%s/%s", rootPath, subdirectoryName, resourceName);
try (JarFile jar = new JarFile(jarFile)) {
JarEntry entry = jar.getJarEntry(fullLocation);
if (entry == null) {
return null;
}
return getEntryLocator(entry.getName());
} catch (Exception e) {
throw new IllegalArgumentException(
Messages.format("Error loading resource from JAR file: {}", jarFile), e);
}
}
@Override
protected boolean subdirectoryExists(String subdirectoryName) {
// classpath resources are forward-slash separated
String resolvedName = subdirectoryName.startsWith("/") ? subdirectoryName.substring(1) : subdirectoryName;
if (!resolvedName.endsWith("/")) {
resolvedName += "/";
}
return entries.contains(resolvedName);
}
//-------------------------------------------------------------------------
// Gets the resource locator corresponding to a given entry
private ResourceLocator getEntryLocator(String entryName) {
return ResourceLocator.of(ResourceLocator.CLASSPATH_URL_PREFIX + entryName);
}
// Scans the JAR once and caches the names of all entries under the root path,
// stored relative to that root.
private static ImmutableSet<String> getEntries(File jarFile, String rootPath) {
  ImmutableSet.Builder<String> builder = ImmutableSet.builder();
  try (JarFile jar = new JarFile(jarFile)) {
    Enumeration<JarEntry> jarEntries = jar.entries();
    while (jarEntries.hasMoreElements()) {
      JarEntry entry = jarEntries.nextElement();
      String entryName = entry.getName();
      if (entryName.startsWith(rootPath) && !entryName.equals(rootPath)) {
        // assumes rootPath has NO trailing slash: the +1 strips the path
        // separator — TODO confirm for callers that pass a slash-terminated root
        String relativeEntryPath = entryName.substring(rootPath.length() + 1);
        if (!relativeEntryPath.trim().isEmpty()) {
          builder.add(relativeEntryPath);
        }
      }
    }
  } catch (Exception e) {
    throw new IllegalArgumentException(
        Messages.format("Error scanning entries in JAR file: {}", jarFile), e);
  }
  return builder.build();
}
return builder.build();
}
}

View File

@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata.credit.markit;
@ -44,217 +44,219 @@ import java.util.Map;
*/
public class MarkitIndexCreditCurveDataParser {
// Markit date format with the month in full caps. e.g. 11-JUL-14
private static final DateTimeFormatter DATE_FORMAT = new DateTimeFormatterBuilder()
.parseCaseInsensitive().appendPattern("dd-MMM-uu").toFormatter(Locale.ENGLISH);
// Markit date format with the month in full caps. e.g. 11-JUL-14
private static final DateTimeFormatter DATE_FORMAT = new DateTimeFormatterBuilder()
.parseCaseInsensitive().appendPattern("dd-MMM-uu").toFormatter(Locale.ENGLISH);
enum Columns {
enum Columns {
Series("Series"),
Version("Version"),
Term("Term"),
RedCode("RED Code"),
Maturity("Maturity"),
CompositeSpread("Composite Spread"),
ModelSpread("Model Spread");
Series("Series"),
Version("Version"),
Term("Term"),
RedCode("RED Code"),
Maturity("Maturity"),
CompositeSpread("Composite Spread"),
ModelSpread("Model Spread");
private final String columnName;
private final String columnName;
Columns(String columnName) {
this.columnName = columnName;
}
public String getColumnName() {
return columnName;
}
}
/**
* Parses the specified sources.
*
* @param builder the market data builder that the resulting curve and recovery rate items should be loaded into
* @param curveSource the source of curve data to parse
* @param staticDataSource the source of static data to parse
*/
public static void parse(
ImmutableMarketDataBuilder builder,
CharSource curveSource,
CharSource staticDataSource) {
Map<IsdaIndexCreditCurveInputsId, List<Point>> curveData = Maps.newHashMap();
Map<MarkitRedCode, StaticData> staticDataMap = parseStaticData(staticDataSource);
CsvFile csv = CsvFile.of(curveSource, true);
for (CsvRow row : csv.rows()) {
String seriesText = row.getField(Columns.Series.getColumnName());
String versionText = row.getField(Columns.Version.getColumnName());
String termText = row.getField(Columns.Term.getColumnName());
String redCodeText = row.getField(Columns.RedCode.getColumnName());
String maturityText = row.getField(Columns.Maturity.getColumnName());
String compositeSpreadText = row.getField(Columns.CompositeSpread.getColumnName());
String modelSpreadText = row.getField(Columns.ModelSpread.getColumnName());
StandardId indexId = MarkitRedCode.id(redCodeText);
int indexSeries = Integer.parseInt(seriesText);
int indexAnnexVersion = Integer.parseInt(versionText);
IsdaIndexCreditCurveInputsId id = IsdaIndexCreditCurveInputsId.of(
IndexReferenceInformation.of(
indexId,
indexSeries,
indexAnnexVersion));
Tenor term = Tenor.parse(termText);
LocalDate maturity = LocalDate.parse(maturityText, DATE_FORMAT);
double spread;
if (compositeSpreadText.isEmpty()) {
if (modelSpreadText.isEmpty()) {
// there is no rate for this row, continue
continue;
Columns(String columnName) {
this.columnName = columnName;
}
// fall back to the model rate is the composite is missing
spread = parseRate(modelSpreadText);
} else {
// prefer the composite rate if it is present
spread = parseRate(compositeSpreadText);
}
List<Point> points = curveData.get(id);
if (points == null) {
points = Lists.newArrayList();
curveData.put(id, points);
}
points.add(new Point(term, maturity, spread));
public String getColumnName() {
return columnName;
}
}
for (IsdaIndexCreditCurveInputsId curveId : curveData.keySet()) {
MarkitRedCode redCode = MarkitRedCode.from(curveId.getReferenceInformation().getIndexId());
StaticData staticData = staticDataMap.get(redCode);
ArgChecker.notNull(staticData, "Did not find a static data record for " + redCode);
CdsConvention convention = staticData.getConvention();
double recoveryRate = staticData.getRecoveryRate();
double indexFactor = staticData.getIndexFactor();
// TODO add fromDate handling
/**
* Parses the specified sources.
*
* @param builder the market data builder that the resulting curve and recovery rate items should be loaded into
* @param curveSource the source of curve data to parse
* @param staticDataSource the source of static data to parse
*/
public static void parse(
ImmutableMarketDataBuilder builder,
CharSource curveSource,
CharSource staticDataSource) {
String creditCurveName = curveId.toString();
Map<IsdaIndexCreditCurveInputsId, List<Point>> curveData = Maps.newHashMap();
Map<MarkitRedCode, StaticData> staticDataMap = parseStaticData(staticDataSource);
List<Point> points = curveData.get(curveId);
CsvFile csv = CsvFile.of(curveSource, true);
for (CsvRow row : csv.rows()) {
String seriesText = row.getField(Columns.Series.getColumnName());
String versionText = row.getField(Columns.Version.getColumnName());
String termText = row.getField(Columns.Term.getColumnName());
String redCodeText = row.getField(Columns.RedCode.getColumnName());
String maturityText = row.getField(Columns.Maturity.getColumnName());
String compositeSpreadText = row.getField(Columns.CompositeSpread.getColumnName());
String modelSpreadText = row.getField(Columns.ModelSpread.getColumnName());
Period[] periods = points.stream().map(s -> s.getTenor().getPeriod()).toArray(Period[]::new);
LocalDate[] endDates = points.stream().map(s -> s.getDate()).toArray(LocalDate[]::new);
double[] rates = points.stream().mapToDouble(s -> s.getRate()).toArray();
StandardId indexId = MarkitRedCode.id(redCodeText);
int indexSeries = Integer.parseInt(seriesText);
int indexAnnexVersion = Integer.parseInt(versionText);
IsdaCreditCurveInputs curveInputs = IsdaCreditCurveInputs.of(
CurveName.of(creditCurveName),
periods,
endDates,
rates,
convention,
indexFactor);
IsdaIndexCreditCurveInputsId id = IsdaIndexCreditCurveInputsId.of(
IndexReferenceInformation.of(
indexId,
indexSeries,
indexAnnexVersion));
builder.addValue(curveId, curveInputs);
Tenor term = Tenor.parse(termText);
LocalDate maturity = LocalDate.parse(maturityText, DATE_FORMAT);
IsdaIndexRecoveryRateId recoveryRateId = IsdaIndexRecoveryRateId.of(curveId.getReferenceInformation());
CdsRecoveryRate cdsRecoveryRate = CdsRecoveryRate.of(recoveryRate);
double spread;
if (compositeSpreadText.isEmpty()) {
if (modelSpreadText.isEmpty()) {
// there is no rate for this row, continue
continue;
}
// fall back to the model rate is the composite is missing
spread = parseRate(modelSpreadText);
} else {
// prefer the composite rate if it is present
spread = parseRate(compositeSpreadText);
}
builder.addValue(recoveryRateId, cdsRecoveryRate);
}
}
List<Point> points = curveData.get(id);
if (points == null) {
points = Lists.newArrayList();
curveData.put(id, points);
}
points.add(new Point(term, maturity, spread));
}
// parses the static data file
private static Map<MarkitRedCode, StaticData> parseStaticData(CharSource source) {
CsvFile csv = CsvFile.of(source, true);
for (IsdaIndexCreditCurveInputsId curveId : curveData.keySet()) {
MarkitRedCode redCode = MarkitRedCode.from(curveId.getReferenceInformation().getIndexId());
StaticData staticData = staticDataMap.get(redCode);
ArgChecker.notNull(staticData, "Did not find a static data record for " + redCode);
CdsConvention convention = staticData.getConvention();
double recoveryRate = staticData.getRecoveryRate();
double indexFactor = staticData.getIndexFactor();
// TODO add fromDate handling
Map<MarkitRedCode, StaticData> result = Maps.newHashMap();
for (CsvRow row : csv.rows()) {
String redCodeText = row.getField("RedCode");
String fromDateText = row.getField("From Date");
String conventionText = row.getField("Convention");
String recoveryRateText = row.getField("Recovery Rate");
String indexFactorText = row.getField("Index Factor");
String creditCurveName = curveId.toString();
MarkitRedCode redCode = MarkitRedCode.of(redCodeText);
LocalDate fromDate = LocalDate.parse(fromDateText, DATE_FORMAT);
CdsConvention convention = CdsConvention.of(conventionText);
double recoveryRate = parseRate(recoveryRateText);
double indexFactor = Double.parseDouble(indexFactorText);
List<Point> points = curveData.get(curveId);
result.put(redCode, new StaticData(fromDate, convention, recoveryRate, indexFactor));
}
return result;
}
Period[] periods = points.stream().map(s -> s.getTenor().getPeriod()).toArray(Period[]::new);
LocalDate[] endDates = points.stream().map(s -> s.getDate()).toArray(LocalDate[]::new);
double[] rates = points.stream().mapToDouble(s -> s.getRate()).toArray();
//-------------------------------------------------------------------------
/**
* Stores the parsed static data.
*/
private static class StaticData {
IsdaCreditCurveInputs curveInputs = IsdaCreditCurveInputs.of(
CurveName.of(creditCurveName),
periods,
endDates,
rates,
convention,
indexFactor);
private LocalDate fromDate;
private CdsConvention convention;
private double recoveryRate;
private double indexFactor;
builder.addValue(curveId, curveInputs);
private StaticData(LocalDate fromDate, CdsConvention convention, double recoveryRate, double indexFactor) {
this.fromDate = fromDate;
this.convention = convention;
this.recoveryRate = recoveryRate;
this.indexFactor = indexFactor;
IsdaIndexRecoveryRateId recoveryRateId = IsdaIndexRecoveryRateId.of(curveId.getReferenceInformation());
CdsRecoveryRate cdsRecoveryRate = CdsRecoveryRate.of(recoveryRate);
builder.addValue(recoveryRateId, cdsRecoveryRate);
}
}
@SuppressWarnings("unused")
public LocalDate getFromDate() {
return fromDate;
// Parses the static data file, producing one StaticData record per RED code.
// Later rows with a duplicate RED code overwrite earlier ones (HashMap put).
private static Map<MarkitRedCode, StaticData> parseStaticData(CharSource source) {
  CsvFile csv = CsvFile.of(source, true);
  Map<MarkitRedCode, StaticData> result = Maps.newHashMap();
  for (CsvRow row : csv.rows()) {
    // all columns are looked up by header name, so column order is irrelevant
    String redCodeText = row.getField("RedCode");
    String fromDateText = row.getField("From Date");
    String conventionText = row.getField("Convention");
    String recoveryRateText = row.getField("Recovery Rate");
    String indexFactorText = row.getField("Index Factor");
    MarkitRedCode redCode = MarkitRedCode.of(redCodeText);
    // dates use the Markit dd-MMM-uu format, e.g. 11-JUL-14
    LocalDate fromDate = LocalDate.parse(fromDateText, DATE_FORMAT);
    CdsConvention convention = CdsConvention.of(conventionText);
    // recovery rate is a percentage string like "40%", converted to 0.40
    double recoveryRate = parseRate(recoveryRateText);
    double indexFactor = Double.parseDouble(indexFactorText);
    result.put(redCode, new StaticData(fromDate, convention, recoveryRate, indexFactor));
  }
  return result;
}
public CdsConvention getConvention() {
return convention;
//-------------------------------------------------------------------------
/**
* Stores the parsed static data.
*/
// Immutable holder for one row of parsed static data (per RED code).
private static class StaticData {

  // fields are assigned exactly once in the constructor; declared final to
  // document and enforce the immutability of this value holder
  private final LocalDate fromDate;
  private final CdsConvention convention;
  private final double recoveryRate;
  private final double indexFactor;

  private StaticData(LocalDate fromDate, CdsConvention convention, double recoveryRate, double indexFactor) {
    this.fromDate = fromDate;
    this.convention = convention;
    this.recoveryRate = recoveryRate;
    this.indexFactor = indexFactor;
  }

  // retained for completeness of the record; not read by the parser yet
  @SuppressWarnings("unused")
  public LocalDate getFromDate() {
    return fromDate;
  }

  public CdsConvention getConvention() {
    return convention;
  }

  public double getRecoveryRate() {
    return recoveryRate;
  }

  public double getIndexFactor() {
    return indexFactor;
  }
}
public double getRecoveryRate() {
return recoveryRate;
//-------------------------------------------------------------------------
/**
* Stores the parsed data points.
*/
// Immutable holder for one parsed curve data point: a tenor, its maturity
// date, and the spread rate at that point.
private static class Point {
  private final Tenor tenor;
  private final LocalDate date;
  private final double rate;

  private Point(Tenor tenor, LocalDate date, double rate) {
    this.tenor = tenor;
    this.date = date;
    this.rate = rate;
  }

  public Tenor getTenor() {
    return tenor;
  }

  public LocalDate getDate() {
    return date;
  }

  public double getRate() {
    return rate;
  }
}
public double getIndexFactor() {
return indexFactor;
// Converts from a string percentage rate with a percent sign to a double rate,
// e.g. "0.12%" => 0.0012d. The percent sign is optional in practice since it
// is simply stripped before parsing.
private static double parseRate(String input) {
  String numericText = input.replace("%", "");
  double percent = Double.parseDouble(numericText);
  return percent / 100d;
}
}
//-------------------------------------------------------------------------
/**
* Stores the parsed data points.
*/
private static class Point {
private final Tenor tenor;
private final LocalDate date;
private final double rate;
private Point(Tenor tenor, LocalDate date, double rate) {
this.tenor = tenor;
this.date = date;
this.rate = rate;
}
public Tenor getTenor() {
return tenor;
}
public LocalDate getDate() {
return date;
}
public double getRate() {
return rate;
}
}
// Converts from a string percentage rate with a percent sign to a double rate
// e.g. 0.12% => 0.0012d
private static double parseRate(String input) {
return Double.parseDouble(input.replace("%", "")) / 100d;
}
}

View File

@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata.credit.markit;
@ -19,72 +19,74 @@ import org.joda.convert.FromString;
* http://www.markit.com/product/reference-data-cds
*/
public final class MarkitRedCode
extends TypedString<MarkitRedCode> {
extends TypedString<MarkitRedCode> {
/**
* Serialization version.
*/
private static final long serialVersionUID = 1L;
/**
* Serialization version.
*/
private static final long serialVersionUID = 1L;
/**
* Scheme used in an OpenGamma {@link StandardId} where the value is a Markit RED code.
*/
public static final String MARKIT_REDCODE_SCHEME = "MarkitRedCode";
/**
* Scheme used in an OpenGamma {@link StandardId} where the value is a Markit RED code.
*/
public static final String MARKIT_REDCODE_SCHEME = "MarkitRedCode";
//-------------------------------------------------------------------------
/**
* Obtains an instance from the specified name.
* <p>
* RED codes must be 6 or 9 characters long.
*
* @param name the name of the field
* @return a RED code
*/
@FromString
public static MarkitRedCode of(String name) {
ArgChecker.isTrue(name.length() == 6 || name.length() == 9, "RED Code must be exactly 6 or 9 characters");
return new MarkitRedCode(name);
}
//-------------------------------------------------------------------------
/**
* Converts from a standard identifier ensuring the scheme is correct.
*
* @param id standard id identifying a RED code
* @return the equivalent RED code
*/
public static MarkitRedCode from(StandardId id) {
Preconditions.checkArgument(id.getScheme().equals(MARKIT_REDCODE_SCHEME));
return MarkitRedCode.of(id.getValue());
}
/**
* Obtains an instance from the specified name.
* <p>
* RED codes must be 6 or 9 characters long.
*
* @param name the name of the field
* @return a RED code
*/
@FromString
public static MarkitRedCode of(String name) {
ArgChecker.isTrue(name.length() == 6 || name.length() == 9, "RED Code must be exactly 6 or 9 characters");
return new MarkitRedCode(name);
}
/**
* Creates a standard identifier using the correct Markit RED code scheme.
*
* @param name the Markit RED code, 6 or 9 characters long
* @return the equivalent standard identifier
*/
public static StandardId id(String name) {
ArgChecker.isTrue(name.length() == 6 || name.length() == 9, "RED Code must be exactly 6 or 9 characters");
return StandardId.of(MARKIT_REDCODE_SCHEME, name);
}
/**
* Converts from a standard identifier ensuring the scheme is correct.
*
* @param id standard id identifying a RED code
* @return the equivalent RED code
*/
public static MarkitRedCode from(StandardId id) {
Preconditions.checkArgument(id.getScheme().equals(MARKIT_REDCODE_SCHEME));
return MarkitRedCode.of(id.getValue());
}
/**
* Creates an instance.
*
* @param name the RED code
*/
private MarkitRedCode(String name) {
super(name);
}
/**
* Creates a standard identifier using the correct Markit RED code scheme.
*
* @param name the Markit RED code, 6 or 9 characters long
* @return the equivalent standard identifier
*/
public static StandardId id(String name) {
ArgChecker.isTrue(name.length() == 6 || name.length() == 9, "RED Code must be exactly 6 or 9 characters");
return StandardId.of(MARKIT_REDCODE_SCHEME, name);
}
//-------------------------------------------------------------------------
/**
* Converts this RED code to a standard identifier.
*
* @return the standard identifier
*/
public StandardId toStandardId() {
return StandardId.of(MARKIT_REDCODE_SCHEME, getName());
}
/**
* Creates an instance.
*
* @param name the RED code
*/
private MarkitRedCode(String name) {
super(name);
}
//-------------------------------------------------------------------------
/**
* Converts this RED code to a standard identifier.
*
* @return the standard identifier
*/
public StandardId toStandardId() {
return StandardId.of(MARKIT_REDCODE_SCHEME, getName());
}
}

View File

@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata.credit.markit;
@ -14,96 +14,97 @@ import com.opengamma.strata.product.credit.RestructuringClause;
*/
public enum MarkitRestructuringClause {
/**
* Modified-Modified Restructuring 2003.
*/
MM,
/**
* Modified-Modified Restructuring 2014.
*/
MM14,
/**
* Modified Restructuring 2003.
*/
MR,
/**
* Modified Restructuring 2014.
*/
MR14,
/**
* Cum/Old/Full Restructuring 2003.
*/
CR,
/**
* Cum/Old/Full Restructuring 2014.
*/
CR14,
/**
* Ex/No restructuring 2003.
*/
XR,
/**
* Ex/No restructuring 2014.
*/
XR14;
/**
* Modified-Modified Restructuring 2003.
*/
MM,
/**
* Modified-Modified Restructuring 2014.
*/
MM14,
/**
* Modified Restructuring 2003.
*/
MR,
/**
* Modified Restructuring 2014.
*/
MR14,
/**
* Cum/Old/Full Restructuring 2003.
*/
CR,
/**
* Cum/Old/Full Restructuring 2014.
*/
CR14,
/**
* Ex/No restructuring 2003.
*/
XR,
/**
* Ex/No restructuring 2014.
*/
XR14;
//-------------------------------------------------------------------------
/**
* Converts Markit code to standard restructuring clause.
*
* @return the converted clause
*/
public RestructuringClause translate() {
switch (this) {
case MM:
return RestructuringClause.MOD_MOD_RESTRUCTURING_2003;
case MM14:
return RestructuringClause.MOD_MOD_RESTRUCTURING_2014;
case MR:
return RestructuringClause.MODIFIED_RESTRUCTURING_2003;
case MR14:
return RestructuringClause.MODIFIED_RESTRUCTURING_2014;
case CR:
return RestructuringClause.CUM_RESTRUCTURING_2003;
case CR14:
return RestructuringClause.CUM_RESTRUCTURING_2014;
case XR:
return RestructuringClause.NO_RESTRUCTURING_2003;
case XR14:
return RestructuringClause.NO_RESTRUCTURING_2014;
default:
throw new IllegalStateException("Unmapped restructuring clause. Do not have mapping for " + this);
//-------------------------------------------------------------------------
/**
* Converts Markit code to standard restructuring clause.
*
* @return the converted clause
*/
public RestructuringClause translate() {
switch (this) {
case MM:
return RestructuringClause.MOD_MOD_RESTRUCTURING_2003;
case MM14:
return RestructuringClause.MOD_MOD_RESTRUCTURING_2014;
case MR:
return RestructuringClause.MODIFIED_RESTRUCTURING_2003;
case MR14:
return RestructuringClause.MODIFIED_RESTRUCTURING_2014;
case CR:
return RestructuringClause.CUM_RESTRUCTURING_2003;
case CR14:
return RestructuringClause.CUM_RESTRUCTURING_2014;
case XR:
return RestructuringClause.NO_RESTRUCTURING_2003;
case XR14:
return RestructuringClause.NO_RESTRUCTURING_2014;
default:
throw new IllegalStateException("Unmapped restructuring clause. Do not have mapping for " + this);
}
}
}
/**
* Converts restructuring clause to Markit equivalent.
*
* @param restructuringClause the clause to convert
* @return the converted clause
*/
public static MarkitRestructuringClause from(RestructuringClause restructuringClause) {
switch (restructuringClause) {
case MOD_MOD_RESTRUCTURING_2003:
return MM;
case MOD_MOD_RESTRUCTURING_2014:
return MM14;
case MODIFIED_RESTRUCTURING_2003:
return MR;
case MODIFIED_RESTRUCTURING_2014:
return MR14;
case CUM_RESTRUCTURING_2003:
return CR;
case CUM_RESTRUCTURING_2014:
return CR14;
case NO_RESTRUCTURING_2003:
return XR;
case NO_RESTRUCTURING_2014:
return XR14;
default:
throw new UnsupportedOperationException("Unknown restructuring clause. Do not have mapping for " + restructuringClause);
/**
* Converts restructuring clause to Markit equivalent.
*
* @param restructuringClause the clause to convert
* @return the converted clause
*/
public static MarkitRestructuringClause from(RestructuringClause restructuringClause) {
switch (restructuringClause) {
case MOD_MOD_RESTRUCTURING_2003:
return MM;
case MOD_MOD_RESTRUCTURING_2014:
return MM14;
case MODIFIED_RESTRUCTURING_2003:
return MR;
case MODIFIED_RESTRUCTURING_2014:
return MR14;
case CUM_RESTRUCTURING_2003:
return CR;
case CUM_RESTRUCTURING_2014:
return CR14;
case NO_RESTRUCTURING_2003:
return XR;
case NO_RESTRUCTURING_2014:
return XR14;
default:
throw new UnsupportedOperationException("Unknown restructuring clause. Do not have mapping for " + restructuringClause);
}
}
}
}

View File

@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata.credit.markit;
@ -14,75 +14,76 @@ import com.opengamma.strata.product.credit.SeniorityLevel;
*/
public enum MarkitSeniorityLevel {
/**
* Senior domestic.
*/
SECDOM,
/**
* Senior domestic.
*/
SECDOM,
/**
* Senior foreign.
*/
SNRFOR,
/**
* Senior foreign.
*/
SNRFOR,
/**
* Subordinate, Lower Tier 2.
*/
SUBLT2,
/**
* Subordinate, Lower Tier 2.
*/
SUBLT2,
/**
* Subordinate Tier 1.
*/
PREFT1,
/**
* Subordinate Tier 1.
*/
PREFT1,
/**
* Subordinate, Upper Tier 2.
*/
JRSUBUT2;
/**
* Subordinate, Upper Tier 2.
*/
JRSUBUT2;
//-------------------------------------------------------------------------
/**
* Converts Markit code to standard seniority level.
*
* @return the converted level
*/
public SeniorityLevel translate() {
switch (this) {
case SECDOM:
return SeniorityLevel.SENIOR_SECURED_DOMESTIC;
case SNRFOR:
return SeniorityLevel.SENIOR_UNSECURED_FOREIGN;
case SUBLT2:
return SeniorityLevel.SUBORDINATE_LOWER_TIER_2;
case PREFT1:
return SeniorityLevel.SUBORDINATE_TIER_1;
case JRSUBUT2:
return SeniorityLevel.SUBORDINATE_UPPER_TIER_2;
default:
throw new IllegalStateException("Unmapped seniority level. Do not have mapping for " + this);
//-------------------------------------------------------------------------
/**
* Converts Markit code to standard seniority level.
*
* @return the converted level
*/
public SeniorityLevel translate() {
switch (this) {
case SECDOM:
return SeniorityLevel.SENIOR_SECURED_DOMESTIC;
case SNRFOR:
return SeniorityLevel.SENIOR_UNSECURED_FOREIGN;
case SUBLT2:
return SeniorityLevel.SUBORDINATE_LOWER_TIER_2;
case PREFT1:
return SeniorityLevel.SUBORDINATE_TIER_1;
case JRSUBUT2:
return SeniorityLevel.SUBORDINATE_UPPER_TIER_2;
default:
throw new IllegalStateException("Unmapped seniority level. Do not have mapping for " + this);
}
}
}
/**
* Converts seniority level to Markit equivalent.
*
* @param seniorityLevel the level to convert
* @return the converted level
*/
public static MarkitSeniorityLevel from(SeniorityLevel seniorityLevel) {
switch (seniorityLevel) {
case SENIOR_SECURED_DOMESTIC:
return SECDOM;
case SENIOR_UNSECURED_FOREIGN:
return SNRFOR;
case SUBORDINATE_LOWER_TIER_2:
return SUBLT2;
case SUBORDINATE_TIER_1:
return PREFT1;
case SUBORDINATE_UPPER_TIER_2:
return JRSUBUT2;
default:
throw new IllegalArgumentException("Unknown seniority level. Do not have mapping for " + seniorityLevel);
/**
* Converts seniority level to Markit equivalent.
*
* @param seniorityLevel the level to convert
* @return the converted level
*/
public static MarkitSeniorityLevel from(SeniorityLevel seniorityLevel) {
switch (seniorityLevel) {
case SENIOR_SECURED_DOMESTIC:
return SECDOM;
case SENIOR_UNSECURED_FOREIGN:
return SNRFOR;
case SUBORDINATE_LOWER_TIER_2:
return SUBLT2;
case SUBORDINATE_TIER_1:
return PREFT1;
case SUBORDINATE_UPPER_TIER_2:
return JRSUBUT2;
default:
throw new IllegalArgumentException("Unknown seniority level. Do not have mapping for " + seniorityLevel);
}
}
}
}

View File

@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata.credit.markit;
@ -56,141 +56,141 @@ import java.util.Scanner;
*/
public class MarkitSingleNameCreditCurveDataParser {
// Markit date format with the month in full caps. e.g. 11-JUL-14
private static final DateTimeFormatter DATE_FORMAT = new DateTimeFormatterBuilder()
.parseCaseInsensitive().appendPattern("dd-MMM-uu").toFormatter(Locale.ENGLISH);
// Markit date format with the month in full caps. e.g. 11-JUL-14
private static final DateTimeFormatter DATE_FORMAT = new DateTimeFormatterBuilder()
.parseCaseInsensitive().appendPattern("dd-MMM-uu").toFormatter(Locale.ENGLISH);
// Index used to access the specified columns of string data in the file
private static final int DATE = 0;
private static final int RED_CODE = 3;
private static final int TIER = 4;
private static final int CURRENCY = 5;
private static final int DOCS_CLAUSE = 6;
private static final int FIRST_SPREAD_COLUMN = 8;
private static final int RECOVERY = 19;
// Index used to access the specified columns of string data in the file
private static final int DATE = 0;
private static final int RED_CODE = 3;
private static final int TIER = 4;
private static final int CURRENCY = 5;
private static final int DOCS_CLAUSE = 6;
private static final int FIRST_SPREAD_COLUMN = 8;
private static final int RECOVERY = 19;
private static final List<Tenor> TENORS = ImmutableList.of(
Tenor.TENOR_6M,
Tenor.TENOR_1Y,
Tenor.TENOR_2Y,
Tenor.TENOR_3Y,
Tenor.TENOR_4Y,
Tenor.TENOR_5Y,
Tenor.TENOR_7Y,
Tenor.TENOR_10Y,
Tenor.TENOR_15Y,
Tenor.TENOR_20Y,
Tenor.TENOR_30Y);
private static final List<Tenor> TENORS = ImmutableList.of(
Tenor.TENOR_6M,
Tenor.TENOR_1Y,
Tenor.TENOR_2Y,
Tenor.TENOR_3Y,
Tenor.TENOR_4Y,
Tenor.TENOR_5Y,
Tenor.TENOR_7Y,
Tenor.TENOR_10Y,
Tenor.TENOR_15Y,
Tenor.TENOR_20Y,
Tenor.TENOR_30Y);
/**
* Parses the specified sources.
*
* @param builder the market data builder that the resulting curve and recovery rate items should be loaded into
* @param curveSource the source of curve data to parse
* @param staticDataSource the source of static data to parse
*/
public static void parse(
ImmutableMarketDataBuilder builder,
CharSource curveSource,
CharSource staticDataSource) {
/**
* Parses the specified sources.
*
* @param builder the market data builder that the resulting curve and recovery rate items should be loaded into
* @param curveSource the source of curve data to parse
* @param staticDataSource the source of static data to parse
*/
public static void parse(
ImmutableMarketDataBuilder builder,
CharSource curveSource,
CharSource staticDataSource) {
Map<MarkitRedCode, CdsConvention> conventions = parseStaticData(staticDataSource);
try (Scanner scanner = new Scanner(curveSource.openStream())) {
while (scanner.hasNextLine()) {
Map<MarkitRedCode, CdsConvention> conventions = parseStaticData(staticDataSource);
try (Scanner scanner = new Scanner(curveSource.openStream())) {
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
// skip over header rows
if (line.startsWith("V5 CDS Composites by Convention") ||
line.trim().isEmpty() ||
line.startsWith("\"Date\",")) {
continue;
String line = scanner.nextLine();
// skip over header rows
if (line.startsWith("V5 CDS Composites by Convention") ||
line.trim().isEmpty() ||
line.startsWith("\"Date\",")) {
continue;
}
String[] columns = line.split(",");
for (int i = 0; i < columns.length; i++) {
// get rid of quotes and trim the string
columns[i] = columns[i].replaceFirst("^\"", "").replaceFirst("\"$", "").trim();
}
LocalDate valuationDate = LocalDate.parse(columns[DATE], DATE_FORMAT);
MarkitRedCode redCode = MarkitRedCode.of(columns[RED_CODE]);
SeniorityLevel seniorityLevel = MarkitSeniorityLevel.valueOf(columns[TIER]).translate();
Currency currency = Currency.parse(columns[CURRENCY]);
RestructuringClause restructuringClause = MarkitRestructuringClause.valueOf(columns[DOCS_CLAUSE]).translate();
double recoveryRate = parseRate(columns[RECOVERY]);
SingleNameReferenceInformation referenceInformation = SingleNameReferenceInformation.of(
redCode.toStandardId(),
seniorityLevel,
currency,
restructuringClause);
IsdaSingleNameCreditCurveInputsId curveId = IsdaSingleNameCreditCurveInputsId.of(referenceInformation);
List<Period> periodsList = Lists.newArrayList();
List<Double> ratesList = Lists.newArrayList();
for (int i = 0; i < TENORS.size(); i++) {
String rateString = columns[FIRST_SPREAD_COLUMN + i];
if (rateString.isEmpty()) {
// no data at this point
continue;
}
periodsList.add(TENORS.get(i).getPeriod());
ratesList.add(parseRate(rateString));
}
String creditCurveName = curveId.toString();
CdsConvention cdsConvention = conventions.get(redCode);
Period[] periods = periodsList.stream().toArray(Period[]::new);
LocalDate[] endDates = Lists
.newArrayList(periods)
.stream()
.map(p -> cdsConvention.calculateUnadjustedMaturityDateFromValuationDate(valuationDate, p))
.toArray(LocalDate[]::new);
double[] rates = ratesList.stream().mapToDouble(s -> s).toArray();
double unitScalingFactor = 1d; // for single name, we don't do any scaling (no index factor)
IsdaCreditCurveInputs curveInputs = IsdaCreditCurveInputs.of(
CurveName.of(creditCurveName),
periods,
endDates,
rates,
cdsConvention,
unitScalingFactor);
builder.addValue(curveId, curveInputs);
IsdaSingleNameRecoveryRateId recoveryRateId = IsdaSingleNameRecoveryRateId.of(referenceInformation);
CdsRecoveryRate cdsRecoveryRate = CdsRecoveryRate.of(recoveryRate);
builder.addValue(recoveryRateId, cdsRecoveryRate);
}
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
String[] columns = line.split(",");
for (int i = 0; i < columns.length; i++) {
// get rid of quotes and trim the string
columns[i] = columns[i].replaceFirst("^\"", "").replaceFirst("\"$", "").trim();
}
LocalDate valuationDate = LocalDate.parse(columns[DATE], DATE_FORMAT);
MarkitRedCode redCode = MarkitRedCode.of(columns[RED_CODE]);
SeniorityLevel seniorityLevel = MarkitSeniorityLevel.valueOf(columns[TIER]).translate();
Currency currency = Currency.parse(columns[CURRENCY]);
RestructuringClause restructuringClause = MarkitRestructuringClause.valueOf(columns[DOCS_CLAUSE]).translate();
double recoveryRate = parseRate(columns[RECOVERY]);
SingleNameReferenceInformation referenceInformation = SingleNameReferenceInformation.of(
redCode.toStandardId(),
seniorityLevel,
currency,
restructuringClause);
IsdaSingleNameCreditCurveInputsId curveId = IsdaSingleNameCreditCurveInputsId.of(referenceInformation);
List<Period> periodsList = Lists.newArrayList();
List<Double> ratesList = Lists.newArrayList();
for (int i = 0; i < TENORS.size(); i++) {
String rateString = columns[FIRST_SPREAD_COLUMN + i];
if (rateString.isEmpty()) {
// no data at this point
continue;
}
periodsList.add(TENORS.get(i).getPeriod());
ratesList.add(parseRate(rateString));
}
String creditCurveName = curveId.toString();
CdsConvention cdsConvention = conventions.get(redCode);
Period[] periods = periodsList.stream().toArray(Period[]::new);
LocalDate[] endDates = Lists
.newArrayList(periods)
.stream()
.map(p -> cdsConvention.calculateUnadjustedMaturityDateFromValuationDate(valuationDate, p))
.toArray(LocalDate[]::new);
double[] rates = ratesList.stream().mapToDouble(s -> s).toArray();
double unitScalingFactor = 1d; // for single name, we don't do any scaling (no index factor)
IsdaCreditCurveInputs curveInputs = IsdaCreditCurveInputs.of(
CurveName.of(creditCurveName),
periods,
endDates,
rates,
cdsConvention,
unitScalingFactor);
builder.addValue(curveId, curveInputs);
IsdaSingleNameRecoveryRateId recoveryRateId = IsdaSingleNameRecoveryRateId.of(referenceInformation);
CdsRecoveryRate cdsRecoveryRate = CdsRecoveryRate.of(recoveryRate);
builder.addValue(recoveryRateId, cdsRecoveryRate);
}
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
// parses the static data file of RED code to convention
private static Map<MarkitRedCode, CdsConvention> parseStaticData(CharSource source) {
CsvFile csv = CsvFile.of(source, true);
Map<MarkitRedCode, CdsConvention> result = Maps.newHashMap();
for (CsvRow row : csv.rows()) {
String redCodeText = row.getField("RedCode");
String conventionText = row.getField("Convention");
result.put(MarkitRedCode.of(redCodeText), CdsConvention.of(conventionText));
// parses the static data file of RED code to convention
private static Map<MarkitRedCode, CdsConvention> parseStaticData(CharSource source) {
CsvFile csv = CsvFile.of(source, true);
Map<MarkitRedCode, CdsConvention> result = Maps.newHashMap();
for (CsvRow row : csv.rows()) {
String redCodeText = row.getField("RedCode");
String conventionText = row.getField("Convention");
result.put(MarkitRedCode.of(redCodeText), CdsConvention.of(conventionText));
}
return result;
}
return result;
}
// Converts from a string percentage rate with a percent sign to a double rate
// e.g. 0.12% => 0.0012d
private static double parseRate(String input) {
return Double.parseDouble(input.replace("%", "")) / 100d;
}
// Converts from a string percentage rate with a percent sign to a double rate
// e.g. 0.12% => 0.0012d
private static double parseRate(String input) {
return Double.parseDouble(input.replace("%", "")) / 100d;
}
}

View File

@ -1,6 +1,6 @@
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
*/
package com.opengamma.strata.examples.marketdata.credit.markit;
@ -31,104 +31,105 @@ import java.util.Map;
*/
public class MarkitYieldCurveDataParser {
private static final String DATE = "Valuation Date";
private static final String TENOR = "Tenor";
private static final String INSTRUMENT = "Instrument Type";
private static final String RATE = "Rate";
private static final String CONVENTION = "Curve Convention";
private static final String DATE = "Valuation Date";
private static final String TENOR = "Tenor";
private static final String INSTRUMENT = "Instrument Type";
private static final String RATE = "Rate";
private static final String CONVENTION = "Curve Convention";
/**
* Parses the specified source.
*
* @param source the source to parse
* @return the map of parsed yield curve par rates
*/
public static Map<IsdaYieldCurveInputsId, IsdaYieldCurveInputs> parse(CharSource source) {
// parse the curve data
Map<IsdaYieldCurveConvention, List<Point>> curveData = Maps.newHashMap();
CsvFile csv = CsvFile.of(source, true);
for (CsvRow row : csv.rows()) {
String dateText = row.getField(DATE);
String tenorText = row.getField(TENOR);
String instrumentText = row.getField(INSTRUMENT);
String rateText = row.getField(RATE);
String conventionText = row.getField(CONVENTION);
/**
* Parses the specified source.
*
* @param source the source to parse
* @return the map of parsed yield curve par rates
*/
public static Map<IsdaYieldCurveInputsId, IsdaYieldCurveInputs> parse(CharSource source) {
// parse the curve data
Map<IsdaYieldCurveConvention, List<Point>> curveData = Maps.newHashMap();
CsvFile csv = CsvFile.of(source, true);
for (CsvRow row : csv.rows()) {
String dateText = row.getField(DATE);
String tenorText = row.getField(TENOR);
String instrumentText = row.getField(INSTRUMENT);
String rateText = row.getField(RATE);
String conventionText = row.getField(CONVENTION);
Point point = new Point(
Tenor.parse(tenorText),
LocalDate.parse(dateText, DateTimeFormatter.ISO_LOCAL_DATE),
mapUnderlyingType(instrumentText),
Double.parseDouble(rateText));
IsdaYieldCurveConvention convention = IsdaYieldCurveConvention.of(conventionText);
Point point = new Point(
Tenor.parse(tenorText),
LocalDate.parse(dateText, DateTimeFormatter.ISO_LOCAL_DATE),
mapUnderlyingType(instrumentText),
Double.parseDouble(rateText));
IsdaYieldCurveConvention convention = IsdaYieldCurveConvention.of(conventionText);
List<Point> points = curveData.get(convention);
if (points == null) {
points = Lists.newArrayList();
curveData.put(convention, points);
}
points.add(point);
List<Point> points = curveData.get(convention);
if (points == null) {
points = Lists.newArrayList();
curveData.put(convention, points);
}
points.add(point);
}
// convert the curve data into the result map
Map<IsdaYieldCurveInputsId, IsdaYieldCurveInputs> result = Maps.newHashMap();
for (IsdaYieldCurveConvention convention : curveData.keySet()) {
List<Point> points = curveData.get(convention);
result.put(IsdaYieldCurveInputsId.of(convention.getCurrency()),
IsdaYieldCurveInputs.of(
CurveName.of(convention.getName()),
points.stream().map(s -> s.getTenor().getPeriod()).toArray(Period[]::new),
points.stream().map(s -> s.getDate()).toArray(LocalDate[]::new),
points.stream().map(s -> s.getInstrumentType()).toArray(IsdaYieldCurveUnderlyingType[]::new),
points.stream().mapToDouble(s -> s.getRate()).toArray(),
convention));
}
return result;
}
// convert the curve data into the result map
Map<IsdaYieldCurveInputsId, IsdaYieldCurveInputs> result = Maps.newHashMap();
for (IsdaYieldCurveConvention convention : curveData.keySet()) {
List<Point> points = curveData.get(convention);
result.put(IsdaYieldCurveInputsId.of(convention.getCurrency()),
IsdaYieldCurveInputs.of(
CurveName.of(convention.getName()),
points.stream().map(s -> s.getTenor().getPeriod()).toArray(Period[]::new),
points.stream().map(s -> s.getDate()).toArray(LocalDate[]::new),
points.stream().map(s -> s.getInstrumentType()).toArray(IsdaYieldCurveUnderlyingType[]::new),
points.stream().mapToDouble(s -> s.getRate()).toArray(),
convention));
}
return result;
}
// parse the M/S instrument type flag
private static IsdaYieldCurveUnderlyingType mapUnderlyingType(String type) {
switch (type) {
case "M":
return IsdaYieldCurveUnderlyingType.ISDA_MONEY_MARKET;
case "S":
return IsdaYieldCurveUnderlyingType.ISDA_SWAP;
default:
throw new IllegalStateException("Unknown underlying type, only M or S allowed: " + type);
}
}
//-------------------------------------------------------------------------
/**
* Stores the parsed data points.
*/
private static class Point {
private final Tenor tenor;
private final LocalDate date;
private final IsdaYieldCurveUnderlyingType instrumentType;
private final double rate;
private Point(Tenor tenor, LocalDate baseDate, IsdaYieldCurveUnderlyingType instrumentType, double rate) {
this.tenor = tenor;
this.date = baseDate.plus(tenor.getPeriod());
this.instrumentType = instrumentType;
this.rate = rate;
// parse the M/S instrument type flag
private static IsdaYieldCurveUnderlyingType mapUnderlyingType(String type) {
switch (type) {
case "M":
return IsdaYieldCurveUnderlyingType.ISDA_MONEY_MARKET;
case "S":
return IsdaYieldCurveUnderlyingType.ISDA_SWAP;
default:
throw new IllegalStateException("Unknown underlying type, only M or S allowed: " + type);
}
}
public Tenor getTenor() {
return tenor;
}
//-------------------------------------------------------------------------
public LocalDate getDate() {
return date;
}
/**
* Stores the parsed data points.
*/
private static class Point {
private final Tenor tenor;
private final LocalDate date;
private final IsdaYieldCurveUnderlyingType instrumentType;
private final double rate;
public IsdaYieldCurveUnderlyingType getInstrumentType() {
return instrumentType;
}
private Point(Tenor tenor, LocalDate baseDate, IsdaYieldCurveUnderlyingType instrumentType, double rate) {
this.tenor = tenor;
this.date = baseDate.plus(tenor.getPeriod());
this.instrumentType = instrumentType;
this.rate = rate;
}
public double getRate() {
return rate;
public Tenor getTenor() {
return tenor;
}
public LocalDate getDate() {
return date;
}
public IsdaYieldCurveUnderlyingType getInstrumentType() {
return instrumentType;
}
public double getRate() {
return rate;
}
}
}
}

View File

@ -1,7 +1,9 @@
/**
* Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
* <p>
* Credit market data for examples.
*/
/**

View File

@ -1,7 +1,9 @@
/**
* Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* <p>
* Please see distribution for license.
* <p>
* Market data for examples.
*/
/**

View File

@ -254,13 +254,14 @@ class PortfolioApi(val rpc: CordaRPCOps) {
val parties = rpc.networkMapSnapshot()
val notaries = rpc.notaryIdentities()
// TODO We are not able to filter by network map node now
val counterParties = parties.filterNot { it.legalIdentities.any { it in notaries }
|| ownParty in it.legalIdentities
val counterParties = parties.filterNot {
it.legalIdentities.any { it in notaries }
|| ownParty in it.legalIdentities
}
return AvailableParties(
self = ApiParty(ownParty.owningKey.toBase58String(), ownParty.name),
// TODO It will show all identities including service identities.
counterparties = counterParties.flatMap { it.legalIdentitiesAndCerts.map { ApiParty(it.owningKey.toBase58String(), it.name) }}
counterparties = counterParties.flatMap { it.legalIdentitiesAndCerts.map { ApiParty(it.owningKey.toBase58String(), it.name) } }
)
}

View File

@ -70,6 +70,7 @@ object SimmFlow {
private val existing: StateAndRef<PortfolioState>?)
: FlowLogic<RevisionedState<PortfolioState.Update>>() {
constructor(otherParty: Party, valuationDate: LocalDate) : this(otherParty, valuationDate, null)
lateinit var notary: Party
lateinit var otherPartySession: FlowSession
@ -318,7 +319,7 @@ object SimmFlow {
logger.info("Handshake finished, awaiting Simm update")
replyToSession.send(Ack) // Hack to state that this party is ready.
subFlow(object : StateRevisionFlow.Receiver<PortfolioState.Update>(replyToSession) {
override fun verifyProposal(stx:SignedTransaction, proposal: Proposal<PortfolioState.Update>) {
override fun verifyProposal(stx: SignedTransaction, proposal: Proposal<PortfolioState.Update>) {
super.verifyProposal(stx, proposal)
if (proposal.modification.portfolio != portfolio.refs) throw StateReplacementException()
}

View File

@ -15,7 +15,7 @@ import net.corda.vega.contracts.RevisionedState
*/
object StateRevisionFlow {
open class Requester<T>(curStateRef: StateAndRef<RevisionedState<T>>,
updatedData: T) : AbstractStateReplacementFlow.Instigator<RevisionedState<T>, RevisionedState<T>, T>(curStateRef, updatedData) {
updatedData: T) : AbstractStateReplacementFlow.Instigator<RevisionedState<T>, RevisionedState<T>, T>(curStateRef, updatedData) {
override fun assembleTx(): AbstractStateReplacementFlow.UpgradeTx {
val state = originalState.state.data
val tx = state.generateRevision(originalState.state.notary, originalState, modification)

View File

@ -48,97 +48,97 @@ import static java.util.stream.Collectors.toList;
*/
public class SwapExampleX {
public static final LocalDate VALUATION_DATE = LocalDate.of(2016, 6, 6);
public static final LocalDate VALUATION_DATE = LocalDate.of(2016, 6, 6);
public static void main(String[] args) {
CurveGroupDefinition curveGroupDefinition = loadCurveGroup();
MarketData marketData = loadMarketData();
List<SwapTrade> trades = ImmutableList.of(createVanillaFixedVsLibor3mSwap(), createVanillaFixedVsLibor6mSwap());
CurveCalibrator calibrator = CurveCalibrator.of(1e-9, 1e-9, 100, CalibrationMeasures.PAR_SPREAD);
ImmutableRatesProvider ratesProvider = calibrator.calibrate(curveGroupDefinition, marketData, ReferenceData.standard());
MarketDataFxRateProvider fxRateProvider = MarketDataFxRateProvider.of(marketData);
ImmutableRatesProvider combinedRatesProvider = ImmutableRatesProvider.combined(fxRateProvider, ratesProvider);
public static void main(String[] args) {
CurveGroupDefinition curveGroupDefinition = loadCurveGroup();
MarketData marketData = loadMarketData();
List<SwapTrade> trades = ImmutableList.of(createVanillaFixedVsLibor3mSwap(), createVanillaFixedVsLibor6mSwap());
CurveCalibrator calibrator = CurveCalibrator.of(1e-9, 1e-9, 100, CalibrationMeasures.PAR_SPREAD);
ImmutableRatesProvider ratesProvider = calibrator.calibrate(curveGroupDefinition, marketData, ReferenceData.standard());
MarketDataFxRateProvider fxRateProvider = MarketDataFxRateProvider.of(marketData);
ImmutableRatesProvider combinedRatesProvider = ImmutableRatesProvider.combined(fxRateProvider, ratesProvider);
List<ResolvedSwapTrade> resolvedTrades = trades.stream().map(trade -> trade.resolve(ReferenceData.standard())).collect(toList());
DiscountingSwapProductPricer pricer = DiscountingSwapProductPricer.DEFAULT;
List<ResolvedSwapTrade> resolvedTrades = trades.stream().map(trade -> trade.resolve(ReferenceData.standard())).collect(toList());
DiscountingSwapProductPricer pricer = DiscountingSwapProductPricer.DEFAULT;
CurrencyParameterSensitivities totalSensitivities = CurrencyParameterSensitivities.empty();
MultiCurrencyAmount totalCurrencyExposure = MultiCurrencyAmount.empty();
CurrencyParameterSensitivities totalSensitivities = CurrencyParameterSensitivities.empty();
MultiCurrencyAmount totalCurrencyExposure = MultiCurrencyAmount.empty();
for (ResolvedSwapTrade resolvedTrade : resolvedTrades) {
ResolvedSwap swap = resolvedTrade.getProduct();
for (ResolvedSwapTrade resolvedTrade : resolvedTrades) {
ResolvedSwap swap = resolvedTrade.getProduct();
PointSensitivities pointSensitivities = pricer.presentValueSensitivity(swap, combinedRatesProvider).build();
CurrencyParameterSensitivities sensitivities = combinedRatesProvider.parameterSensitivity(pointSensitivities);
MultiCurrencyAmount currencyExposure = pricer.currencyExposure(swap, combinedRatesProvider);
PointSensitivities pointSensitivities = pricer.presentValueSensitivity(swap, combinedRatesProvider).build();
CurrencyParameterSensitivities sensitivities = combinedRatesProvider.parameterSensitivity(pointSensitivities);
MultiCurrencyAmount currencyExposure = pricer.currencyExposure(swap, combinedRatesProvider);
totalSensitivities = totalSensitivities.combinedWith(sensitivities);
totalCurrencyExposure = totalCurrencyExposure.plus(currencyExposure);
totalSensitivities = totalSensitivities.combinedWith(sensitivities);
totalCurrencyExposure = totalCurrencyExposure.plus(currencyExposure);
}
//PortfolioNormalizer normalizer = new PortfolioNormalizer(Currency.EUR, combinedRatesProvider);
//RwamBimmNotProductClassesCalculator calculatorTotal = new RwamBimmNotProductClassesCalculator(
// fxRateProvider,
// Currency.EUR,
// IsdaConfiguration.INSTANCE);
//
//Triple<Double, Double, Double> margin = BimmAnalysisUtils.computeMargin(
// combinedRatesProvider,
// normalizer,
// calculatorTotal,
// totalSensitivities,
// totalCurrencyExposure);
//
//System.out.println(margin);
}
//PortfolioNormalizer normalizer = new PortfolioNormalizer(Currency.EUR, combinedRatesProvider);
//RwamBimmNotProductClassesCalculator calculatorTotal = new RwamBimmNotProductClassesCalculator(
// fxRateProvider,
// Currency.EUR,
// IsdaConfiguration.INSTANCE);
//
//Triple<Double, Double, Double> margin = BimmAnalysisUtils.computeMargin(
// combinedRatesProvider,
// normalizer,
// calculatorTotal,
// totalSensitivities,
// totalCurrencyExposure);
//
//System.out.println(margin);
}
//--------------------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------------------
/**
* Load the market quotes and FX rates from data files.
*/
private static MarketData loadMarketData() {
Path dataDir = Paths.get("src/test/resources/data");
Path quotesFile = dataDir.resolve("BIMM-MARKET-QUOTES-20160606.csv");
Path fxFile = dataDir.resolve("BIMM-FX-RATES-20160606.csv");
/**
* Load the market quotes and FX rates from data files.
*/
private static MarketData loadMarketData() {
Path dataDir = Paths.get("src/test/resources/data");
Path quotesFile = dataDir.resolve("BIMM-MARKET-QUOTES-20160606.csv");
Path fxFile = dataDir.resolve("BIMM-FX-RATES-20160606.csv");
Map<QuoteId, Double> quotes = QuotesCsvLoader.load(VALUATION_DATE, ImmutableList.of(ResourceLocator.ofPath(quotesFile)));
Map<FxRateId, FxRate> fxRates = FxRatesCsvLoader.load(VALUATION_DATE, ResourceLocator.ofPath(fxFile));
return ImmutableMarketData.builder(VALUATION_DATE).addValueMap(quotes).addValueMap(fxRates).build();
}
Map<QuoteId, Double> quotes = QuotesCsvLoader.load(VALUATION_DATE, ImmutableList.of(ResourceLocator.ofPath(quotesFile)));
Map<FxRateId, FxRate> fxRates = FxRatesCsvLoader.load(VALUATION_DATE, ResourceLocator.ofPath(fxFile));
return ImmutableMarketData.builder(VALUATION_DATE).addValueMap(quotes).addValueMap(fxRates).build();
}
/**
* Loads the curve group definition from data files.
*
* A curve group maps from curve name to index for forward curves and curve name to currency for discount curves.
*/
private static CurveGroupDefinition loadCurveGroup() {
Path settingsDir = Paths.get("src/test/resources/settings");
Map<CurveGroupName, CurveGroupDefinition> curveGroups = RatesCalibrationCsvLoader.load(
ResourceLocator.ofPath(settingsDir.resolve("BIMM-groups-EUR.csv")),
ResourceLocator.ofPath(settingsDir.resolve("BIMM-settings-EUR.csv")),
ResourceLocator.ofPath(settingsDir.resolve("BIMM-nodes-EUR.csv")));
return curveGroups.get(CurveGroupName.of("BIMM"));
}
/**
* Loads the curve group definition from data files.
* <p>
* A curve group maps from curve name to index for forward curves and curve name to currency for discount curves.
*/
private static CurveGroupDefinition loadCurveGroup() {
Path settingsDir = Paths.get("src/test/resources/settings");
Map<CurveGroupName, CurveGroupDefinition> curveGroups = RatesCalibrationCsvLoader.load(
ResourceLocator.ofPath(settingsDir.resolve("BIMM-groups-EUR.csv")),
ResourceLocator.ofPath(settingsDir.resolve("BIMM-settings-EUR.csv")),
ResourceLocator.ofPath(settingsDir.resolve("BIMM-nodes-EUR.csv")));
return curveGroups.get(CurveGroupName.of("BIMM"));
}
//--------------------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------------------
private static SwapTrade createVanillaFixedVsLibor3mSwap() {
return FixedIborSwapConventions.EUR_FIXED_1Y_EURIBOR_3M.createTrade(
VALUATION_DATE,
Tenor.TENOR_4Y,
BuySell.BUY,
200_000_000,
0.015,
ReferenceData.standard());
}
private static SwapTrade createVanillaFixedVsLibor3mSwap() {
return FixedIborSwapConventions.EUR_FIXED_1Y_EURIBOR_3M.createTrade(
VALUATION_DATE,
Tenor.TENOR_4Y,
BuySell.BUY,
200_000_000,
0.015,
ReferenceData.standard());
}
private static SwapTrade createVanillaFixedVsLibor6mSwap() {
return FixedIborSwapConventions.EUR_FIXED_1Y_EURIBOR_6M.createTrade(
VALUATION_DATE,
Tenor.TENOR_10Y,
BuySell.SELL,
100_000_000,
0.013,
ReferenceData.standard());
}
private static SwapTrade createVanillaFixedVsLibor6mSwap() {
return FixedIborSwapConventions.EUR_FIXED_1Y_EURIBOR_6M.createTrade(
VALUATION_DATE,
Tenor.TENOR_10Y,
BuySell.SELL,
100_000_000,
0.013,
ReferenceData.standard());
}
}

View File

@ -27,19 +27,21 @@ import java.util.*
* Interface for communicating with nodes running the trader demo.
*/
class TraderDemoClientApi(val rpc: CordaRPCOps) {
val cashCount: Long get() {
val count = builder { VaultSchemaV1.VaultStates::recordedTime.count() }
val countCriteria = QueryCriteria.VaultCustomQueryCriteria(count)
return rpc.vaultQueryBy<Cash.State>(countCriteria).otherResults.single() as Long
}
val cashCount: Long
get() {
val count = builder { VaultSchemaV1.VaultStates::recordedTime.count() }
val countCriteria = QueryCriteria.VaultCustomQueryCriteria(count)
return rpc.vaultQueryBy<Cash.State>(countCriteria).otherResults.single() as Long
}
val dollarCashBalance: Amount<Currency> get() = rpc.getCashBalance(USD)
val commercialPaperCount: Long get() {
val count = builder { VaultSchemaV1.VaultStates::recordedTime.count() }
val countCriteria = QueryCriteria.VaultCustomQueryCriteria(count)
return rpc.vaultQueryBy<CommercialPaper.State>(countCriteria).otherResults.single() as Long
}
val commercialPaperCount: Long
get() {
val count = builder { VaultSchemaV1.VaultStates::recordedTime.count() }
val countCriteria = QueryCriteria.VaultCustomQueryCriteria(count)
return rpc.vaultQueryBy<CommercialPaper.State>(countCriteria).otherResults.single() as Long
}
fun runIssuer(amount: Amount<Currency>, buyerName: CordaX500Name, sellerName: CordaX500Name) {
val ref = OpaqueBytes.of(1)

View File

@ -31,7 +31,9 @@ class CommercialPaperIssueFlow(private val amount: Amount<Currency>,
companion object {
val PROSPECTUS_HASH = SecureHash.parse("decd098666b9657314870e192ced0c3519c2c9d395507a238338f8d003929de9")
object ISSUING : ProgressTracker.Step("Issuing and timestamping some commercial paper")
fun tracker() = ProgressTracker(ISSUING)
}

View File

@ -39,11 +39,13 @@ class PredefinedTestNode internal constructor(party: Party, driver: DriverDSLExp
* for it: you won't have [ALICE_KEY].
*/
fun DriverDSLExposedInterface.alice(): PredefinedTestNode = PredefinedTestNode(ALICE, this, null)
/**
* Returns a plain, entirely stock node pre-configured with the [BOB] identity. Note that a random key will be generated
* for it: you won't have [BOB_KEY].
*/
fun DriverDSLExposedInterface.bob(): PredefinedTestNode = PredefinedTestNode(BOB, this, null)
/**
* Returns a plain single node notary pre-configured with the [DUMMY_NOTARY] identity. Note that a random key will be generated
* for it: you won't have [DUMMY_NOTARY_KEY].

View File

@ -20,7 +20,8 @@ import java.nio.file.Path
* Creates and tests a ledger built by the passed in dsl. The provided services can be customised, otherwise a default
* of a freshly built [MockServices] is used.
*/
@JvmOverloads fun ledger(
@JvmOverloads
fun ledger(
services: ServiceHub = MockServices(),
initialiseSerialization: Boolean = true,
dsl: LedgerDSL<TestTransactionDSLInterpreter, TestLedgerDSLInterpreter>.() -> Unit
@ -40,7 +41,8 @@ import java.nio.file.Path
*
* @see LedgerDSLInterpreter._transaction
*/
@JvmOverloads fun transaction(
@JvmOverloads
fun transaction(
transactionLabel: String? = null,
transactionBuilder: TransactionBuilder = TransactionBuilder(notary = DUMMY_NOTARY),
initialiseSerialization: Boolean = true,
@ -54,6 +56,7 @@ fun testNodeConfiguration(
myLegalName: CordaX500Name,
notaryConfig: NotaryConfig? = null): NodeConfiguration {
abstract class MockableNodeConfiguration : NodeConfiguration // Otherwise Mockito is defeated by val getters.
val nc = spy<MockableNodeConfiguration>()
whenever(nc.baseDirectory).thenReturn(baseDirectory)
whenever(nc.myLegalName).thenReturn(myLegalName)

View File

@ -64,7 +64,7 @@ interface RPCDriverExposedDSLInterface : DriverDSLExposedInterface {
maxFileSize: Int = ArtemisMessagingServer.MAX_FILE_SIZE,
maxBufferedBytesPerClient: Long = 10L * ArtemisMessagingServer.MAX_FILE_SIZE,
configuration: RPCServerConfiguration = RPCServerConfiguration.default,
ops : I
ops: I
): CordaFuture<RpcServerHandle>
/**
@ -110,8 +110,8 @@ interface RPCDriverExposedDSLInterface : DriverDSLExposedInterface {
maxBufferedBytesPerClient: Long = 10L * ArtemisMessagingServer.MAX_FILE_SIZE,
configuration: RPCServerConfiguration = RPCServerConfiguration.default,
customPort: NetworkHostAndPort? = null,
ops : I
) : CordaFuture<RpcServerHandle>
ops: I
): CordaFuture<RpcServerHandle>
/**
* Starts a Netty RPC client.
@ -180,16 +180,19 @@ interface RPCDriverExposedDSLInterface : DriverDSLExposedInterface {
brokerHandle: RpcBrokerHandle
): RpcServerHandle
}
inline fun <reified I : RPCOps> RPCDriverExposedDSLInterface.startInVmRpcClient(
username: String = rpcTestUser.username,
password: String = rpcTestUser.password,
configuration: RPCClientConfiguration = RPCClientConfiguration.default
) = startInVmRpcClient(I::class.java, username, password, configuration)
inline fun <reified I : RPCOps> RPCDriverExposedDSLInterface.startRandomRpcClient(
hostAndPort: NetworkHostAndPort,
username: String = rpcTestUser.username,
password: String = rpcTestUser.password
) = startRandomRpcClient(I::class.java, hostAndPort, username, password)
inline fun <reified I : RPCOps> RPCDriverExposedDSLInterface.startRpcClient(
rpcAddress: NetworkHostAndPort,
username: String = rpcTestUser.username,
@ -200,7 +203,8 @@ inline fun <reified I : RPCOps> RPCDriverExposedDSLInterface.startRpcClient(
interface RPCDriverInternalDSLInterface : DriverDSLInternalInterface, RPCDriverExposedDSLInterface
data class RpcBrokerHandle(
val hostAndPort: NetworkHostAndPort?,/** null if this is an InVM broker */
val hostAndPort: NetworkHostAndPort?,
/** null if this is an InVM broker */
val clientTransportConfiguration: TransportConfiguration,
val serverControl: ActiveMQServerControl
)
@ -253,6 +257,7 @@ private class SingleUserSecurityManager(val rpcUser: User) : ActiveMQSecurityMan
override fun validateUser(user: String?, password: String?, certificates: Array<out X509Certificate>?): String? {
return validate(user, password)
}
override fun validateUserAndRole(user: String?, password: String?, roles: MutableSet<Role>?, checkType: CheckType?, address: String?, connection: RemotingConnection?): String? {
return validate(user, password)
}
@ -260,6 +265,7 @@ private class SingleUserSecurityManager(val rpcUser: User) : ActiveMQSecurityMan
private fun isValid(user: String?, password: String?): Boolean {
return rpcUser.username == user && rpcUser.password == password
}
private fun validate(user: String?, password: String?): String? {
return if (isValid(user, password)) user else null
}
@ -303,6 +309,7 @@ data class RPCDriverDSL(
}
)
}
fun createInVmRpcServerArtemisConfig(maxFileSize: Int, maxBufferedBytesPerClient: Long): Configuration {
return ConfigurationImpl().apply {
acceptorConfigurations = setOf(TransportConfiguration(InVMAcceptorFactory::class.java.name))
@ -310,6 +317,7 @@ data class RPCDriverDSL(
configureCommonSettings(maxFileSize, maxBufferedBytesPerClient)
}
}
fun createRpcServerArtemisConfig(maxFileSize: Int, maxBufferedBytesPerClient: Long, baseDirectory: Path, hostAndPort: NetworkHostAndPort): Configuration {
val connectionDirection = ConnectionDirection.Inbound(acceptorFactoryClassName = NettyAcceptorFactory::class.java.name)
return ConfigurationImpl().apply {
@ -321,6 +329,7 @@ data class RPCDriverDSL(
configureCommonSettings(maxFileSize, maxBufferedBytesPerClient)
}
}
val inVmClientTransportConfiguration = TransportConfiguration(InVMConnectorFactory::class.java.name)
fun createNettyClientTransportConfiguration(hostAndPort: NetworkHostAndPort): TransportConfiguration {
return ArtemisTcpTransport.tcpTransport(ConnectionDirection.Outbound(), hostAndPort, null)
@ -503,6 +512,7 @@ class RandomRpcUser {
add(Generator.string())
add(Generator.int())
}
data class Call(val method: Method, val call: () -> Any?)
@JvmStatic

View File

@ -143,7 +143,8 @@ class InMemoryMessagingNetwork(
}
/** This can be set to an object which can inject artificial latency between sender/recipient pairs. */
@Volatile var latencyCalculator: LatencyCalculator? = null
@Volatile
var latencyCalculator: LatencyCalculator? = null
private val timer = Timer()
@Synchronized
@ -327,7 +328,7 @@ class InMemoryMessagingNetwork(
while (!Thread.currentThread().isInterrupted) {
try {
pumpReceiveInternal(true)
} catch(e: InterruptedException) {
} catch (e: InterruptedException) {
break
}
}
@ -452,7 +453,7 @@ class InMemoryMessagingNetwork(
for (handler in deliverTo) {
try {
handler.callback(transfer.toReceivedMessage(), handler)
} catch(e: Exception) {
} catch (e: Exception) {
log.error("Caught exception in handler for $this/${handler.topicSession}", e)
}
}

View File

@ -53,6 +53,6 @@ class MockNetworkMapCache(serviceHub: ServiceHubInternal) : PersistentNetworkMap
*/
@VisibleForTesting
fun deleteRegistration(legalIdentity: Party): Boolean {
return partyNodes.removeIf { legalIdentity.owningKey in it.legalIdentitiesAndCerts.map { it.owningKey }}
return partyNodes.removeIf { legalIdentity.owningKey in it.legalIdentitiesAndCerts.map { it.owningKey } }
}
}

View File

@ -247,7 +247,9 @@ class MockNetwork(private val networkSendManuallyPumped: Boolean = false,
// This does not indirect through the NodeInfo object so it can be called before the node is started.
// It is used from the network visualiser tool.
@Suppress("unused") val place: WorldMapLocation get() = findMyLocation()!!
@Suppress("unused")
val place: WorldMapLocation
get() = findMyLocation()!!
private var dbCloser: (() -> Any?)? = null
override fun <T> initialiseDatabasePersistence(schemaService: SchemaService, insideTransaction: () -> T) = super.initialiseDatabasePersistence(schemaService) {
@ -302,7 +304,7 @@ class MockNetwork(private val networkSendManuallyPumped: Boolean = false,
legalName = MOCK_NET_MAP.name,
notaryIdentity = null,
advertisedServices = arrayOf(),
entropyRoot = BigInteger.valueOf(random63BitValue()),
entropyRoot = BigInteger.valueOf(random63BitValue()),
configOverrides = {},
start = true
).started!!.apply {

View File

@ -82,7 +82,9 @@ open class MockServices(
fun makeTestDatabaseProperties(key: String? = null, value: String? = null): Properties {
val props = Properties()
props.setProperty("transactionIsolationLevel", "repeatableRead") //for other possible values see net.corda.node.utilities.CordaPeristence.parserTransactionIsolationLevel(String)
if (key != null) { props.setProperty(key, value) }
if (key != null) {
props.setProperty(key, value)
}
return props
}
@ -154,10 +156,11 @@ open class MockServices(
override val contractUpgradeService: ContractUpgradeService get() = throw UnsupportedOperationException()
override val networkMapCache: NetworkMapCache get() = throw UnsupportedOperationException()
override val clock: Clock get() = Clock.systemUTC()
override val myInfo: NodeInfo get() {
val identity = getTestPartyAndCertificate(MEGA_CORP.name, key.public)
return NodeInfo(emptyList(), listOf(identity), 1, serial = 1L)
}
override val myInfo: NodeInfo
get() {
val identity = getTestPartyAndCertificate(MEGA_CORP.name, key.public)
return NodeInfo(emptyList(), listOf(identity), 1, serial = 1L)
}
override val transactionVerifierService: TransactionVerifierService get() = InMemoryTransactionVerifierService(2)
val mockCordappProvider: MockCordappProvider = MockCordappProvider(CordappLoader.createWithTestPackages(cordappPackages + CordappLoader.testPackages)).start(attachments) as MockCordappProvider
override val cordappProvider: CordappProvider = mockCordappProvider

View File

@ -25,7 +25,8 @@ class TestClock(private var delegateClock: Clock = Clock.systemUTC()) : MutableC
/**
* Advance this [Clock] by the specified [Duration] for testing purposes.
*/
@Synchronized fun advanceBy(duration: Duration) {
@Synchronized
fun advanceBy(duration: Duration) {
delegateClock = offset(delegateClock, duration)
notifyMutationObservers()
}
@ -35,7 +36,8 @@ class TestClock(private var delegateClock: Clock = Clock.systemUTC()) : MutableC
*
* This will only be approximate due to the time ticking away, but will be some time shortly after the requested [Instant].
*/
@Synchronized fun setTo(newInstant: Instant) = advanceBy(instant() until newInstant)
@Synchronized
fun setTo(newInstant: Instant) = advanceBy(instant() until newInstant)
@Synchronized override fun instant(): Instant {
return delegateClock.instant()

View File

@ -46,6 +46,7 @@ class NodeProcess(
class Factory(val buildDirectory: Path = Paths.get("build"),
val cordaJar: Path = Paths.get(this::class.java.getResource("/corda.jar").toURI())) {
val nodesDirectory = buildDirectory / formatter.format(Instant.now())
init {
nodesDirectory.createDirectories()
}
@ -95,11 +96,11 @@ class NodeProcess(
private fun startNode(nodeDir: Path): Process {
val builder = ProcessBuilder()
.command(javaPath.toString(), "-jar", cordaJar.toString())
.directory(nodeDir.toFile())
.command(javaPath.toString(), "-jar", cordaJar.toString())
.directory(nodeDir.toFile())
builder.environment().putAll(mapOf(
"CAPSULE_CACHE_DIR" to (buildDirectory / "capsule").toString()
"CAPSULE_CACHE_DIR" to (buildDirectory / "capsule").toString()
))
return builder.start()

View File

@ -117,8 +117,8 @@ fun freePort(): Int = freePortCounter.getAndAccumulate(0) { prev, _ -> 30000 + (
* to the Node, some other process else could allocate the returned ports.
*/
fun getFreeLocalPorts(hostName: String, numberToAlloc: Int): List<NetworkHostAndPort> {
val freePort = freePortCounter.getAndAccumulate(0) { prev, _ -> 30000 + (prev - 30000 + numberToAlloc) % 10000 }
return (freePort .. freePort + numberToAlloc - 1).map { NetworkHostAndPort(hostName, it) }
val freePort = freePortCounter.getAndAccumulate(0) { prev, _ -> 30000 + (prev - 30000 + numberToAlloc) % 10000 }
return (freePort..freePort + numberToAlloc - 1).map { NetworkHostAndPort(hostName, it) }
}
@JvmOverloads
@ -147,17 +147,17 @@ fun getTestPartyAndCertificate(name: CordaX500Name, publicKey: PublicKey, trustR
}
@Suppress("unused")
inline fun <reified T : Any> T.kryoSpecific(reason: String, function: () -> Unit) = if(!AMQP_ENABLED) {
inline fun <reified T : Any> T.kryoSpecific(reason: String, function: () -> Unit) = if (!AMQP_ENABLED) {
function()
} else {
loggerFor<T>().info("Ignoring Kryo specific test, reason: $reason" )
loggerFor<T>().info("Ignoring Kryo specific test, reason: $reason")
}
@Suppress("unused")
inline fun <reified T : Any> T.amqpSpecific(reason: String, function: () -> Unit) = if(AMQP_ENABLED) {
inline fun <reified T : Any> T.amqpSpecific(reason: String, function: () -> Unit) = if (AMQP_ENABLED) {
function()
} else {
loggerFor<T>().info("Ignoring AMQP specific test, reason: $reason" )
loggerFor<T>().info("Ignoring AMQP specific test, reason: $reason")
}
/**
@ -165,6 +165,7 @@ inline fun <reified T : Any> T.amqpSpecific(reason: String, function: () -> Unit
* TODO: Should be removed after multiple identities are introduced.
*/
fun NodeInfo.chooseIdentityAndCert(): PartyAndCertificate = legalIdentitiesAndCerts.first()
fun NodeInfo.chooseIdentity(): Party = chooseIdentityAndCert().party
/** Returns the identity of the first notary found on the network */
fun ServiceHub.getDefaultNotary(): Party = networkMapCache.notaryIdentities.first()

View File

@ -6,15 +6,16 @@ import java.time.Duration
* Ideas borrowed from "io.kotlintest" with some improvements made
* This is meant for use from Kotlin code use only mainly due to it's inline/reified nature
*/
inline fun <reified E : Throwable, R>eventually(duration: Duration, f: () -> R): R {
inline fun <reified E : Throwable, R> eventually(duration: Duration, f: () -> R): R {
val end = System.nanoTime() + duration.toNanos()
var times = 0
while (System.nanoTime() < end) {
try {
return f()
} catch (e: Throwable) {
when(e) {
is E -> {}// ignore and continue
when (e) {
is E -> {
}// ignore and continue
else -> throw e // unexpected exception type - rethrow
}
}

View File

@ -78,14 +78,15 @@ class FlowStackSnapshotFactoryImpl : FlowStackSnapshotFactory {
return FlowStackSnapshot(Instant.now(), flowClass.name, frames)
}
private val StackTraceElement.instrumentedAnnotation: Instrumented? get() {
Class.forName(className).methods.forEach {
if (it.name == methodName && it.isAnnotationPresent(Instrumented::class.java)) {
return it.getAnnotation(Instrumented::class.java)
private val StackTraceElement.instrumentedAnnotation: Instrumented?
get() {
Class.forName(className).methods.forEach {
if (it.name == methodName && it.isAnnotationPresent(Instrumented::class.java)) {
return it.getAnnotation(Instrumented::class.java)
}
}
return null
}
return null
}
private fun removeConstructorStackTraceElements(stackTrace: List<StackTraceElement>): List<StackTraceElement> {
val newStackTrace = ArrayList<StackTraceElement>()

View File

@ -12,10 +12,13 @@ import kotlin.reflect.jvm.reflect
fun <A : Any, R> measure(a: Iterable<A>, f: (A) -> R) =
measure(listOf(a), f.reflect()!!) { f(uncheckedCast(it[0])) }
fun <A : Any, B : Any, R> measure(a: Iterable<A>, b: Iterable<B>, f: (A, B) -> R) =
measure(listOf(a, b), f.reflect()!!) { f(uncheckedCast(it[0]), uncheckedCast(it[1])) }
fun <A : Any, B : Any, C : Any, R> measure(a: Iterable<A>, b: Iterable<B>, c: Iterable<C>, f: (A, B, C) -> R) =
measure(listOf(a, b, c), f.reflect()!!) { f(uncheckedCast(it[0]), uncheckedCast(it[1]), uncheckedCast(it[2])) }
fun <A : Any, B : Any, C : Any, D : Any, R> measure(a: Iterable<A>, b: Iterable<B>, c: Iterable<C>, d: Iterable<D>, f: (A, B, C, D) -> R) =
measure(listOf(a, b, c, d), f.reflect()!!) { f(uncheckedCast(it[0]), uncheckedCast(it[1]), uncheckedCast(it[2]), uncheckedCast(it[3])) }
@ -30,8 +33,8 @@ private fun <R> measure(paramIterables: List<Iterable<Any?>>, kCallable: KCallab
}
data class MeasureResult<out R>(
val parameters: List<Pair<String, Any?>>,
val result: R
val parameters: List<Pair<String, Any?>>,
val result: R
)
fun <A> iterateLexical(iterables: List<Iterable<A>>): Iterable<List<A>> {

View File

@ -22,32 +22,32 @@ fun initialiseTestSerialization() {
// Check that everything is configured for testing with mutable delegating instances.
try {
check(SerializationDefaults.SERIALIZATION_FACTORY is TestSerializationFactory)
} catch(e: IllegalStateException) {
} catch (e: IllegalStateException) {
SerializationDefaults.SERIALIZATION_FACTORY = TestSerializationFactory()
}
try {
check(SerializationDefaults.P2P_CONTEXT is TestSerializationContext)
} catch(e: IllegalStateException) {
} catch (e: IllegalStateException) {
SerializationDefaults.P2P_CONTEXT = TestSerializationContext()
}
try {
check(SerializationDefaults.RPC_SERVER_CONTEXT is TestSerializationContext)
} catch(e: IllegalStateException) {
} catch (e: IllegalStateException) {
SerializationDefaults.RPC_SERVER_CONTEXT = TestSerializationContext()
}
try {
check(SerializationDefaults.RPC_CLIENT_CONTEXT is TestSerializationContext)
} catch(e: IllegalStateException) {
} catch (e: IllegalStateException) {
SerializationDefaults.RPC_CLIENT_CONTEXT = TestSerializationContext()
}
try {
check(SerializationDefaults.STORAGE_CONTEXT is TestSerializationContext)
} catch(e: IllegalStateException) {
} catch (e: IllegalStateException) {
SerializationDefaults.STORAGE_CONTEXT = TestSerializationContext()
}
try {
check(SerializationDefaults.CHECKPOINT_CONTEXT is TestSerializationContext)
} catch(e: IllegalStateException) {
} catch (e: IllegalStateException) {
SerializationDefaults.CHECKPOINT_CONTEXT = TestSerializationContext()
}

View File

@ -274,7 +274,7 @@ data class TestLedgerDSLInterpreter private constructor(
transactionLabel: String?,
transactionBuilder: TransactionBuilder,
dsl: TransactionDSL<TestTransactionDSLInterpreter>.() -> Unit
) = recordTransactionWithTransactionMap(transactionLabel, transactionBuilder, dsl, nonVerifiedTransactionWithLocations, fillTransaction = true)
) = recordTransactionWithTransactionMap(transactionLabel, transactionBuilder, dsl, nonVerifiedTransactionWithLocations, fillTransaction = true)
override fun tweak(
dsl: LedgerDSL<TestTransactionDSLInterpreter,

View File

@ -119,7 +119,7 @@ class TransactionDSL<out T : TransactionDSLInterpreter>(val interpreter: T) : Tr
*/
@JvmOverloads
fun output(contractClassName: ContractClassName, contractState: ContractState, attachmentConstraint: AttachmentConstraint = AutomaticHashConstraint) =
_output(contractClassName,null, DUMMY_NOTARY, null, attachmentConstraint, contractState)
_output(contractClassName, null, DUMMY_NOTARY, null, attachmentConstraint, contractState)
/**
* Adds a command to the transaction.
@ -146,5 +146,5 @@ class TransactionDSL<out T : TransactionDSLInterpreter>(val interpreter: T) : Tr
*/
fun attachment(contractClassName: ContractClassName) = _attachment(contractClassName)
fun attachments(vararg contractClassNames: ContractClassName) = contractClassNames.forEach { attachment(it)}
fun attachments(vararg contractClassNames: ContractClassName) = contractClassNames.forEach { attachment(it) }
}

View File

@ -19,8 +19,7 @@ class DummyDealContract : Contract {
data class State(
override val participants: List<AbstractParty>,
override val linearId: UniqueIdentifier) : DealState, QueryableState
{
override val linearId: UniqueIdentifier) : DealState, QueryableState {
constructor(participants: List<AbstractParty> = listOf(),
ref: String) : this(participants, UniqueIdentifier(ref))

View File

@ -32,7 +32,7 @@ import java.util.*
@JvmOverloads
fun ServiceHub.fillWithSomeTestDeals(dealIds: List<String>,
participants: List<AbstractParty> = emptyList(),
notary: Party = DUMMY_NOTARY) : Vault<DealState> {
notary: Party = DUMMY_NOTARY): Vault<DealState> {
val myKey: PublicKey = myInfo.chooseIdentity().owningKey
val me = AnonymousParty(myKey)
@ -63,7 +63,7 @@ fun ServiceHub.fillWithSomeTestLinearStates(numberToCreate: Int,
linearString: String = "",
linearNumber: Long = 0L,
linearBoolean: Boolean = false,
linearTimestamp: Instant = now()) : Vault<LinearState> {
linearTimestamp: Instant = now()): Vault<LinearState> {
val myKey: PublicKey = myInfo.chooseIdentity().owningKey
val me = AnonymousParty(myKey)
val issuerKey = DUMMY_NOTARY_KEY
@ -196,7 +196,7 @@ fun calculateRandomlySizedAmounts(howMuch: Amount<Currency>, min: Int, max: Int,
fun <T : LinearState> ServiceHub.consume(states: List<StateAndRef<T>>, notary: Party) {
// Create a txn consuming different contract types
states.forEach {
val builder = TransactionBuilder(notary = notary).apply {
val builder = TransactionBuilder(notary = notary).apply {
addInputState(it)
addCommand(dummyCommand(notary.owningKey))
}
@ -238,7 +238,7 @@ fun <T : LinearState> ServiceHub.consumeAndProduce(states: List<StateAndRef<T>>,
fun ServiceHub.consumeDeals(dealStates: List<StateAndRef<DealState>>, notary: Party) = consume(dealStates, notary)
fun ServiceHub.consumeLinearStates(linearStates: List<StateAndRef<LinearState>>, notary: Party) = consume(linearStates, notary)
fun ServiceHub.evolveLinearStates(linearStates: List<StateAndRef<LinearState>>, notary: Party) = consumeAndProduce(linearStates, notary)
fun ServiceHub.evolveLinearState(linearState: StateAndRef<LinearState>, notary: Party) : StateAndRef<LinearState> = consumeAndProduce(linearState, notary)
fun ServiceHub.evolveLinearState(linearState: StateAndRef<LinearState>, notary: Party): StateAndRef<LinearState> = consumeAndProduce(linearState, notary)
/**
* Consume cash, sending any change to the default identity for this node. Only suitable for use in test scenarios,
@ -254,7 +254,7 @@ fun ServiceHub.consumeCash(amount: Amount<Currency>, to: Party = CHARLIE, notary
*/
@JvmOverloads
fun ServiceHub.consumeCash(amount: Amount<Currency>, ourIdentity: PartyAndCertificate, to: Party = CHARLIE, notary: Party): Vault.Update<ContractState> {
val update = vaultService.rawUpdates.toFuture()
val update = vaultService.rawUpdates.toFuture()
val services = this
// A tx that spends our money.

View File

@ -29,6 +29,7 @@ class HttpApi(val root: URL, val mapper: ObjectMapper = defaultMapper) {
companion object {
fun fromHostAndPort(hostAndPort: NetworkHostAndPort, base: String, protocol: String = "http", mapper: ObjectMapper = defaultMapper): HttpApi
= HttpApi(URL("$protocol://$hostAndPort/$base/"), mapper)
private val defaultMapper: ObjectMapper by lazy {
net.corda.client.jackson.JacksonSupport.createNonRpcMapper()
}

View File

@ -20,8 +20,8 @@ object DummyLinearStateSchema
object DummyLinearStateSchemaV1 : MappedSchema(schemaFamily = DummyLinearStateSchema.javaClass, version = 1, mappedTypes = listOf(PersistentDummyLinearState::class.java)) {
@Entity
@Table(name = "dummy_linear_states",
indexes = arrayOf(Index(name = "external_id_idx", columnList = "external_id"),
Index(name = "uuid_idx", columnList = "uuid")))
indexes = arrayOf(Index(name = "external_id_idx", columnList = "external_id"),
Index(name = "uuid_idx", columnList = "uuid")))
class PersistentDummyLinearState(
/** [ContractState] attributes */