Merged
Changes from all commits
52 commits
9f5e39e
Merge Splice main
martinflorian-da Oct 10, 2025
63c5db3
main sync fixes
martinflorian-da Oct 10, 2025
80a5715
Bump Canton binary
martinflorian-da Oct 10, 2025
55eeedc
Canton fork bump: undo our changes
martinflorian-da Oct 10, 2025
694a5cf
Canton fork bump: bump Canton commit
martinflorian-da Oct 10, 2025
0535934
Canton fork bump: Reapply our changes (things that just worked)
martinflorian-da Oct 10, 2025
e6bccf0
Canton fork bump: Reapply our changes (this is me processing .rej fil…
martinflorian-da Oct 10, 2025
489b90f
Canton fork bump: Bump Canton commit and Canton/SDK versions
martinflorian-da Oct 10, 2025
58a2e04
Controversial change 1: remove `time-proof-freshness-proportion = 0` …
martinflorian-da Oct 10, 2025
98e1026
Various less interesting fixes
martinflorian-da Oct 10, 2025
a2ae58a
Controversial change 2: restore `daml/lf/value/json`
martinflorian-da Oct 10, 2025
db51597
More boring fixes
martinflorian-da Oct 13, 2025
afc85de
Interesting change 3: use default SequencerConnectionPoolDelays every…
martinflorian-da Oct 13, 2025
5d8e197
Interesting change 4: Don't support "vet on upload"
martinflorian-da Oct 13, 2025
774801d
boring fixes
martinflorian-da Oct 13, 2025
8654944
Interesting change 5: Me potentially breaking UpdateHistory
martinflorian-da Oct 13, 2025
b288c37
less interesting fixes
martinflorian-da Oct 13, 2025
8f34d6f
Interesting change 6: remove topologyChangeDelayDuration mostly every…
martinflorian-da Oct 13, 2025
2ca3261
more boring fixes, compiles now!
martinflorian-da Oct 13, 2025
2a09ee7
formatFix (but excluding the canton fork)
martinflorian-da Oct 13, 2025
67659dd
boring: more topology-change-delay-duration cleanup
martinflorian-da Oct 13, 2025
8317f5f
formatFix (also canton)
martinflorian-da Oct 13, 2025
fd786b3
per sync vetting seems required now
cocreature Oct 14, 2025
c4ff270
Fix topology change delay in simtime
cocreature Oct 14, 2025
b21e61a
maybe bump canton again
cocreature Oct 14, 2025
acbf08c
Disable connection pool
cocreature Oct 14, 2025
85ed8a7
[static] fix: ignore python headers in canton/community
martinflorian-da Oct 14, 2025
e19d6db
[static] fix docs build
martinflorian-da Oct 14, 2025
9976bf6
Disable auto-load-dars
cocreature Oct 14, 2025
35a9d37
Set force flag when setting mediator reaction timeout = 0
cocreature Oct 14, 2025
67e213d
Config fixes and log ignores
cocreature Oct 14, 2025
377342f
persist dependencies log ignore
martinflorian-da Oct 14, 2025
540b36d
Try to fix dar uploads
cocreature Oct 14, 2025
8abaa22
Add missing parenthesis
cocreature Oct 14, 2025
b6049af
Fix no_vetted_packages macro
cocreature Oct 14, 2025
d39f789
Fix merge strategy for fastdoubleparser notice
cocreature Oct 14, 2025
a8b58d2
fix simtime tests by actually setting topologyChangeDelay
martinflorian-da Oct 14, 2025
8fb7f06
Extra post shutdown log ignore
cocreature Oct 15, 2025
8ceda21
Ignore SplitwellUpgradeIntegrationTest
cocreature Oct 15, 2025
06daf21
Add missing force flag to unvetting call
cocreature Oct 15, 2025
31f3683
Upgrade Canton and reenable admin workflow loading again
cocreature Oct 15, 2025
ce504ed
More log ignores
cocreature Oct 15, 2025
fb7f989
Query sync store when changing vetting
cocreature Oct 15, 2025
81a9233
Try to fix migration tests
cocreature Oct 15, 2025
e84c8e6
Fix formatting
cocreature Oct 15, 2025
6513b5b
Pass force flag when changing domain parameters
cocreature Oct 15, 2025
2030865
Temporarily only run migration test for faster CI feedback
cocreature Oct 15, 2025
1e1b8ba
Revert "Temporarily only run migration test for faster CI feedback"
cocreature Oct 15, 2025
deb52f3
Fix package lock json
cocreature Oct 15, 2025
866f2c3
Fix stream limits configuration
cocreature Oct 15, 2025
bbbd262
Fix endpoint name on 3.4
cocreature Oct 15, 2025
f18cc19
Improve logs on failed sync connections
cocreature Oct 15, 2025
5 changes: 1 addition & 4 deletions .envrc.vars
@@ -16,7 +16,7 @@ export PUBLIC_CONFIGS_PATH=${SPLICE_ROOT}/cluster/configs/configs

# Increase code heap sizes to avoid issues
# Defaults NonNMethodCodeHeapSize=7M,NonProfiledCodeHeapSize=122M,ProfiledCodeHeapSize=122M
export SBT_OPTS="-Xmx6G -Xms2G -Xss2M -XX:+UseG1GC -XX:NonNMethodCodeHeapSize=32M -XX:NonProfiledCodeHeapSize=256M -XX:ProfiledCodeHeapSize=256M -XX:ReservedCodeCacheSize=544M"
export SBT_OPTS="-Xmx8G -Xms2G -Xss2M -XX:+UseG1GC -XX:NonNMethodCodeHeapSize=32M -XX:NonProfiledCodeHeapSize=256M -XX:ProfiledCodeHeapSize=256M -XX:ReservedCodeCacheSize=544M"

# Provide a simple way to get the path to `sbt-launch.jar` for IntelliJ setup
export SBT_LAUNCH_PATH="$(dirname "$(dirname "$(which sbt)")")/share/sbt/bin/sbt-launch.jar"
@@ -31,7 +31,6 @@ export POSTGRES_HOST="localhost"
export POSTGRES_USER=postgres
export POSTGRES_PASSWORD=postgres


# ** Docker&Helm registry configs

export GHCR=ghcr.io
@@ -47,7 +46,6 @@ export RELEASE_HELM_REGISTRY=$RELEASE_REGISTRY/helm
export OCI_RELEASE_HELM_REGISTRY=oci://$RELEASE_HELM_REGISTRY
export RELEASE_DOCKER_REGISTRY=$RELEASE_REGISTRY/docker


# ** Cluster deployment configs

export CLOUDSDK_COMPUTE_REGION="us-central1"
@@ -102,7 +100,6 @@ export SPLICE_OAUTH_DEV_CLIENT_ID_SPLITWELL_VALIDATOR=hqpZ6TP0wGyG2yYwhH6NLpuo0M
export SPLICE_OAUTH_SV_TEST_AUTHORITY=canton-network-sv-test.us.auth0.com
export SPLICE_OAUTH_SV_TEST_CLIENT_ID_VALIDATOR=bUfFRpl2tEfZBB7wzIo9iRNGTj8wMeIn


# Force auth through gke-gcloud-auth-plugin
# See https://cloud.google.com/blog/products/containers-kubernetes/kubectl-auth-changes-in-gke
export USE_GKE_GCLOUD_AUTH_PLUGIN=true
1 change: 1 addition & 0 deletions .github/actions/tests/skip_on_static/action.yml
@@ -26,6 +26,7 @@ runs:
# is _before_ the approval already e.g. when an external contributor
# created the PR and not when the maintainer approved it after adding the static label.
pr_labels=$(curl -sSL --fail-with-body -H "Authorization: Bearer ${{ inputs.gh_token }}" \
--retry 10 --retry-delay 10 --retry-all-errors \
-H "Accept: application/vnd.github.v3+json" \
"${{ github.event.pull_request.url }}" | jq '.labels')
echo "Pull request labels: $pr_labels"
26 changes: 26 additions & 0 deletions .github/workflows/assign_issues_external.yml
@@ -0,0 +1,26 @@
name: Auto-assign issues from external contributors

on:
issues:
types: [opened]

jobs:
assign:
runs-on: ubuntu-24.04
steps:
- name: Assign issue from external contributors
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const issue = context.payload.issue;
const author = issue.user.login.toLowerCase();
const isInternalContributor = author.endsWith('-da') || author === 'cocreature';
if (issue.assignees.length === 0 && !isInternalContributor) {
console.log('Assigning issue to the triage team...');
await github.rest.issues.addAssignees({
issue_number: issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
assignees: ['isegall-da', 'martinflorian-da', 'ray-roestenburg-da'],
});
}
2 changes: 1 addition & 1 deletion LATEST_RELEASE
@@ -1 +1 @@
0.4.18
0.4.20
25 changes: 14 additions & 11 deletions MAINTENANCE.md
@@ -14,16 +14,17 @@
## Bumping Canton

1. Generate a patch file of the JSON API v2 OpenAPI definition by running `diff-openapi.sh` in `token-standard/dependencies/canton-json-api-v2/openapi/`.
2. Update the Canton Enterprise `version` in `nix/canton-sources.json`. The currently published versions on
2. Choose the Canton version you wish to upgrade to. The currently published versions on
Artifactory can be found [here](https://digitalasset.jfrog.io/ui/repos/tree/General/canton-enterprise).
3. Update the `sha256` hash in the same file by first running `direnv reload` to make the hash validation fail
and using the 'got' hash printed by nix. This is usually easier and more accurate than copying the sha256 hash
displayed for the release version in Artifactory.
4. In case you have also made configuration changes to Canton in `simple-topology-canton.conf`, remember
3. Compute the hashes of the corresponding enterprise and oss versions by running:
`nix store prefetch-file --json --hash-type sha256 https://digitalasset.jfrog.io/artifactory/canton-enterprise/canton-enterprise-<version>.tar.gz | jq -r '.hash'` and
`nix store prefetch-file --json --hash-type sha256 https://www.canton.io/releases/canton-open-source-<version>.tar.gz | jq -r '.hash'`
4. Update the Canton version and hashes of the oss and enterprise versions in `nix/canton-sources.json`.
5. In case you have also made configuration changes to Canton in `simple-topology-canton.conf`, remember
to also make the corresponding changes for our cluster deployments. It is recommended to test any configuration
changes on scratchnet first.
5. Update the OpenAPI definitions from step 1 by running `update-openapi.sh` in `token-standard/dependencies/canton-json-api-v2/openapi/`.
6. Cleanup the `openapi.patch` file.
6. Update the OpenAPI definitions from step 1 by running `update-openapi.sh` in `token-standard/dependencies/canton-json-api-v2/openapi/`.
7. Cleanup the `openapi.patch` file.
Check `token-standard/dependencies/canton-json-api-v2/openapi/CHANGES.md` and apply any changes manually if CI breaks due to
token standard CLI issues that look caused by bad OpenAPI definitions.
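
A minimal sketch of steps 2–4 (the version value is a placeholder; the hashes printed by `nix store prefetch-file` are the values that go into `nix/canton-sources.json`):

```bash
# Placeholder; substitute the Canton version chosen in step 2.
CANTON_VERSION=0.0.0

# Hash of the enterprise tarball (step 3).
nix store prefetch-file --json --hash-type sha256 \
  "https://digitalasset.jfrog.io/artifactory/canton-enterprise/canton-enterprise-${CANTON_VERSION}.tar.gz" \
  | jq -r '.hash'

# Hash of the open-source tarball (step 3).
nix store prefetch-file --json --hash-type sha256 \
  "https://www.canton.io/releases/canton-open-source-${CANTON_VERSION}.tar.gz" \
  | jq -r '.hash'

# Paste the version and both hashes into nix/canton-sources.json (step 4),
# then reload the environment to pick them up.
direnv reload
```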

@@ -46,7 +47,7 @@ Initial setup:
1. Check out the [Canton **Open Source** repo](https://github.com/digital-asset/canton)
2. Define the environment variable used in the commands below using `export PATH_TO_CANTON_OSS=<your-canton-oss-repo-path>`. This can be added to your private env vars.

Current Canton commit: `0467621f75718cedee33887a535fab598954b639`
Current Canton commit: `79e645eb60ba378536a6d62cabbeab78d1be6c61`

1. Checkout the **current Canton commit listed above** in the Canton open source repo from above, so we can diff our current fork against this checkout.
2. Change to your checkout of the Splice repo and execute the following steps:
@@ -66,9 +67,11 @@ Current Canton commit: `0467621f75718cedee33887a535fab598954b639`
1. The current Canton commit in this `README.md`
2. If we're also updating the sdk version (this can lead to dar changes so we might skip it)
1. Set `version` in `CantonDependencies.scala` to the SDK version from Step 3.1
2. Set `sdk_version` in `nix/canton-sources.json` to the SDK release version from Step 3.1.
3. Bump the sdk version in our own `daml.yaml` and `*.nix` files via `./set-sdk.sh $sdkversion` to the same Daml SDK version.
4. Change the hashes for both the linux and macos releases in `daml2js.nix`. To do so change a character of the `sha256` digest (e.g. "ef..." -> "0f...") in `daml2js.nix`,
2. Set `tooling_sdk_version` in `nix/canton-sources.json` to the SDK release version from Step 3.1.
3. Find the Daml release in [Daml releases](https://github.com/digital-asset/daml/releases) whose "based on SDK" version matches the SDK version from Step 3.1.
Set `daml_release` in `nix/canton-sources.json` to that release.
4. Bump the sdk version in our own `daml.yaml` and `*.nix` files via `./set-sdk.sh $sdkversion` to the same Daml SDK version.
5. Change the hashes for both the linux and macos releases in `daml2js.nix`. To do so change a character of the `sha256` digest (e.g. "ef..." -> "0f...") in `daml2js.nix`,
and then call `direnv reload` to make the hash validation fail. Adjust the `sha256` digest by copying back the new hash when Nix throws an error during validation.
Note that nix may print the hash in base64, when you specified it in base16, or vice versa. Just copying the 'got' hash should work in either case.
6. Create another commit, `git add -A && git reset '*.rej' && git commit -s -m"Bump Canton commit and Canton/SDK versions" --no-verify`
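
A rough sketch of the SDK-bump sub-steps 4–6 above (the version is a placeholder; the `daml2js.nix` hash edits are done by hand as described):

```bash
# Placeholder; substitute the Daml SDK version from Step 3.1.
SDK_VERSION=0.0.0

# Sub-step 4: bump the SDK version in daml.yaml and *.nix files.
./set-sdk.sh "$SDK_VERSION"

# Sub-step 5: manually change one character of each sha256 in daml2js.nix,
# then reload so nix reports the correct 'got' hashes to copy back in.
direnv reload

# Sub-step 6: commit, keeping unresolved .rej files out of the commit.
git add -A && git reset '*.rej' && git commit -s -m "Bump Canton commit and Canton/SDK versions" --no-verify
```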
8 changes: 4 additions & 4 deletions Makefile
@@ -22,7 +22,7 @@ wallet-payments-dar := ${SPLICE_ROOT}/daml/splice-wallet-payments/.daml/dist/spl
build: $(app-bundle) $(load-tester) cluster/build ## Build the Splice app bundle and ensure cluster scripts are ready to run.

$(app-bundle): $(canton-amulet-dar) $(wallet-payments-dar)
sbt --batch bundle
sbt --client --batch bundle

$(canton-amulet-dar) $(wallet-payments-dar) &:
sbt --batch 'splice-amulet-daml'/damlBuild 'splice-wallet-payments-daml'/damlBuild
Expand All @@ -31,7 +31,7 @@ $(load-tester):
cd "${SPLICE_ROOT}/load-tester" && npm ci && npm run build

$(party-allocator):
sbt --batch 'party-allocator/npmBuild'
sbt --client --batch 'party-allocator/npmBuild'

.PHONY: update-expected
update-expected: cluster/pulumi/update-expected
@@ -52,13 +52,13 @@ clean: cluster/clean

.PHONY: clean-all
clean-all: clean ## Completely clean all local build state, including model codegen.
sbt --batch clean-splice
sbt --client --batch clean-splice
find . -type d -name ".daml" -exec rm -rf {} +
find . -type d -name "target" -exec rm -rf {} +

.PHONY: format
format: cluster/format ## Automatically reformat and apply scalaFix to source code
sbt --batch formatFix
sbt --client --batch formatFix

.PHONY: help
help: ## Show list of available make targets
@@ -16,6 +16,7 @@ import org.lfdecentralizedtrust.splice.environment.{
}
import org.lfdecentralizedtrust.splice.util.HasHealth
import com.daml.scalautil.Statement.discard
import com.digitalasset.canton.LfPackageId
import com.digitalasset.canton.admin.api.client.commands.GrpcAdminCommand
import com.digitalasset.canton.admin.api.client.data.NodeStatus
import com.digitalasset.canton.config.NonNegativeDuration
@@ -40,6 +41,8 @@ import com.digitalasset.canton.logging.NamedLoggerFactory
import com.digitalasset.canton.participant.config.RemoteParticipantConfig
import com.digitalasset.canton.synchronizer.sequencer.config.RemoteSequencerConfig
import com.digitalasset.canton.topology.NodeIdentity
import com.digitalasset.canton.topology.admin.grpc.TopologyStoreId
import com.digitalasset.canton.topology.transaction.VettedPackage

import java.io.File
import scala.concurrent.ExecutionContext
@@ -250,10 +253,25 @@ class ParticipantClientReference(
def upload_dar_unless_exists(
path: String
): Unit = {
val hash = DarParser.assertReadArchiveFromFile(new File(path)).main.getHash
val dar = DarParser.assertReadArchiveFromFile(new File(path))
val hash = dar.main.getHash
val pkgs = this.ledger_api.packages.list()
if (!pkgs.map(_.packageId).contains(hash)) {
discard[String](this.dars.upload(path))
discard[String](this.dars.upload(path, vetAllPackages = false))
val connected = this.synchronizers.list_connected()
if (connected.isEmpty) {
logger.error(s"Trying to vet $path on ${this.id} but not connected to any synchronizer")
}
connected.foreach { sync =>
this.topology.vetted_packages.propose_delta(
this.id,
adds = dar.all
.map(p => LfPackageId.assertFromString(p.getHash))
.distinct
.map(VettedPackage(_, None, None)),
store = TopologyStoreId.Synchronizer(sync.synchronizerId),
)
}
}
}
}
@@ -343,10 +343,10 @@ class SvAppBackendReference(
}

@Help.Summary("Prepare a validator onboarding and return an onboarding secret (via admin API)")
def prepareValidatorOnboarding(expiresIn: FiniteDuration): String =
def prepareValidatorOnboarding(expiresIn: FiniteDuration, partyHint: Option[String]): String =
consoleEnvironment.run {
httpCommand(
HttpSvAdminAppClient.PrepareValidatorOnboarding(expiresIn)
HttpSvAdminAppClient.PrepareValidatorOnboarding(expiresIn, partyHint)
)
}

@@ -10,12 +10,10 @@ import com.digitalasset.canton.concurrent.ExecutionContextIdlenessExecutorServic
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.environment.ManagedNodes
import com.digitalasset.canton.logging.NamedLoggerFactory
import com.digitalasset.canton.resource.DbMigrationsFactory

/** Scan app instances. */
class ScanApps(
create: (String, ScanAppBackendConfig) => ScanAppBootstrap,
migrationsFactory: DbMigrationsFactory,
_timeouts: ProcessingTimeout,
configs: Map[String, ScanAppBackendConfig],
parametersFor: String => SharedSpliceAppParameters,
@@ -30,7 +28,6 @@ class ScanApps(
ScanAppBootstrap,
](
create,
migrationsFactory,
_timeouts,
configs,
parametersFor,
@@ -11,6 +11,8 @@ import com.digitalasset.canton.console.{
NodeReferences,
StandardConsoleOutput,
}
import com.digitalasset.daml.lf.data.Ref.PackageId
import com.digitalasset.daml.lf.typesig.PackageSignature
import org.apache.pekko.actor.ActorSystem
import org.lfdecentralizedtrust.splice.config.SpliceConfig
import org.lfdecentralizedtrust.splice.console.*
@@ -31,16 +33,11 @@ class SpliceConsoleEnvironment(

override type Config = SpliceConfig

val packageSignatures = ResourceTemplateDecoder.loadPackageSignaturesFromResources(
DarResources.TokenStandard.allPackageResources.flatMap(_.all) ++
DarResources.splitwell.all ++
DarResources.validatorLifecycle.all ++
DarResources.wallet.all ++
DarResources.amulet.all ++
DarResources.dsoGovernance.all
)
implicit val actorSystem: ActorSystem = environment.actorSystem
val templateDecoder = new ResourceTemplateDecoder(packageSignatures, environment.loggerFactory)
private lazy val templateDecoder = new ResourceTemplateDecoder(
SpliceConsoleEnvironment.packageSignatures,
environment.loggerFactory,
)

lazy val httpCommandRunner: ConsoleHttpCommandRunner = new ConsoleHttpCommandRunner(
environment,
@@ -341,3 +338,17 @@ class SpliceConsoleEnvironment(
case _ => 5
}
}

object SpliceConsoleEnvironment {

private lazy val packageSignatures: Map[PackageId, PackageSignature] =
ResourceTemplateDecoder.loadPackageSignaturesFromResources(
DarResources.TokenStandard.allPackageResources.flatMap(_.all) ++
DarResources.splitwell.all ++
DarResources.validatorLifecycle.all ++
DarResources.wallet.all ++
DarResources.amulet.all ++
DarResources.dsoGovernance.all
)

}
@@ -9,7 +9,6 @@ import com.digitalasset.canton.console.ConsoleOutput
import com.digitalasset.canton.environment.*
import com.digitalasset.canton.logging.NamedLoggerFactory
import com.digitalasset.canton.participant.CommunityParticipantNodeBootstrapFactory
import com.digitalasset.canton.resource.CommunityDbMigrationsMetaFactory
import com.digitalasset.canton.synchronizer.mediator.CommunityMediatorNodeBootstrapFactory
import com.digitalasset.canton.synchronizer.sequencer.CommunitySequencerNodeBootstrapFactory
import org.lfdecentralizedtrust.splice.config.SpliceConfig
@@ -34,7 +33,6 @@ class SpliceEnvironment(
CommunityParticipantNodeBootstrapFactory,
CommunitySequencerNodeBootstrapFactory,
CommunityMediatorNodeBootstrapFactory,
new CommunityDbMigrationsMetaFactory(loggerFactory),
loggerFactory,
) {

@@ -73,7 +71,6 @@

lazy val validators = new ValidatorApps(
createValidator,
migrationsFactoryFactory.create(clock),
timeouts,
config.validatorsByString,
config.tryValidatorAppParametersByString,
@@ -105,7 +102,6 @@

lazy val svs = new SvApps(
createSv,
migrationsFactoryFactory.create(clock),
timeouts,
config.svsByString,
config.trySvAppParametersByString,
@@ -137,7 +133,6 @@

lazy val scans = new ScanApps(
createScan,
migrationsFactoryFactory.create(clock),
timeouts,
config.scansByString,
config.tryScanAppParametersByString,
@@ -169,7 +164,6 @@

lazy val splitwells = new SplitwellApps(
createSplitwell,
migrationsFactoryFactory.create(clock),
timeouts,
config.splitwellsByString,
config.trySplitwellAppParametersByString,
@@ -10,12 +10,10 @@ import com.digitalasset.canton.concurrent.ExecutionContextIdlenessExecutorServic
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.environment.ManagedNodes
import com.digitalasset.canton.logging.NamedLoggerFactory
import com.digitalasset.canton.resource.DbMigrationsFactory

/** Splitwell app instances. */
class SplitwellApps(
create: (String, SplitwellAppBackendConfig) => SplitwellAppBootstrap,
migrationsFactory: DbMigrationsFactory,
_timeouts: ProcessingTimeout,
configs: Map[String, SplitwellAppBackendConfig],
parametersFor: String => SharedSpliceAppParameters,
@@ -30,7 +28,6 @@ class SplitwellApps(
SplitwellAppBootstrap,
](
create,
migrationsFactory,
_timeouts,
configs,
parametersFor,
@@ -10,12 +10,10 @@ import com.digitalasset.canton.concurrent.ExecutionContextIdlenessExecutorServic
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.environment.ManagedNodes
import com.digitalasset.canton.logging.NamedLoggerFactory
import com.digitalasset.canton.resource.DbMigrationsFactory

/** SV app instances. */
class SvApps(
create: (String, SvAppBackendConfig) => SvAppBootstrap,
migrationsFactory: DbMigrationsFactory,
_timeouts: ProcessingTimeout,
configs: Map[String, SvAppBackendConfig],
parametersFor: String => SharedSpliceAppParameters,
@@ -30,7 +28,6 @@ class SvApps(
SvAppBootstrap,
](
create,
migrationsFactory,
_timeouts,
configs,
parametersFor,
@@ -10,12 +10,10 @@ import com.digitalasset.canton.concurrent.ExecutionContextIdlenessExecutorServic
import com.digitalasset.canton.config.ProcessingTimeout
import com.digitalasset.canton.environment.ManagedNodes
import com.digitalasset.canton.logging.NamedLoggerFactory
import com.digitalasset.canton.resource.DbMigrationsFactory

/** Validator app instances. */
class ValidatorApps(
create: (String, ValidatorAppBackendConfig) => ValidatorAppBootstrap,
migrationsFactory: DbMigrationsFactory,
_timeouts: ProcessingTimeout,
configs: Map[String, ValidatorAppBackendConfig],
parametersFor: String => SharedSpliceAppParameters,
@@ -30,7 +28,6 @@ class ValidatorApps(
ValidatorAppBootstrap,
](
create,
migrationsFactory,
_timeouts,
configs,
parametersFor,
3 changes: 3 additions & 0 deletions apps/app/src/test/resources/include/mediators.conf
@@ -13,4 +13,7 @@ _mediator_template {
# so that mediator will not block other sequencers from pruning even there is lack of activities
# ref: https://github.com/DACH-NY/canton/issues/16371#issuecomment-1885005687
time-tracker.min-observation-duration = 10s
sequencer-client {
use-new-connection-pool = false
}
}