Format model responses as pretty-printed JSON and cache the flattened model list when probes change.
diff --git a/src/app/ui/main/chat/chat.component.html b/src/app/ui/main/chat/chat.component.html
index 56e32e1..a82a74e 100644
--- a/src/app/ui/main/chat/chat.component.html
+++ b/src/app/ui/main/chat/chat.component.html
@@ -53,7 +53,7 @@
<ng-container *ngIf="s.status === 'QRY_READY'">
<div *ngIf="s.resBody" class="alert alert-success">
- {{s.resBody}}
+ <pre>{{s.resBody | json}}</pre>
</div>
<div *ngIf="s.error" class="alert alert-danger">
{{s.error}}
diff --git a/src/app/ui/main/chat/chat.component.ts b/src/app/ui/main/chat/chat.component.ts
index 19bea1e..a08f55e 100644
--- a/src/app/ui/main/chat/chat.component.ts
+++ b/src/app/ui/main/chat/chat.component.ts
@@ -31,8 +31,9 @@
selectedQuery: NlpQueryState
- @Input()
- allProbes: NlpProbe[]
+ private _allProbes: NlpProbe[]
+
+ private _allModels: NlpModel[]
private _states: NlpQueryState[]
@@ -46,6 +47,26 @@
// No-op.
}
+ get allProbes(): NlpProbe[] {
+ return this._allProbes
+ }
+
+ @Input()
+ set allProbes(value: NlpProbe[]) {
+ if (!this.isSameProbeList(value)) {
+ this._allProbes = value
+
+ // Update models cache.
+ this._allModels = []
+
+ value.forEach(p => {
+ p.models.forEach(m => {
+ this._allModels.push(m)
+ })
+ })
+ }
+ }
+
get states(): NlpQueryState[] {
return this._states
}
@@ -75,15 +96,7 @@
}
allModels(): NlpModel[] {
- const allModels: NlpModel[] = []
-
- this.allProbes.forEach(p => {
- p.models.forEach(m => {
- allModels.push(m)
- })
- })
-
- return allModels
+ return this._allModels
}
async checkStatus() {
@@ -156,4 +169,10 @@
this.selectedModelId = this.allModels()[0].id
}
}
+
+ private isSameProbeList(value: NlpProbe[]) {
+ // Using JSON comparison for simplicity.
+ // Should replace with comparison by probe ID for better performance.
+ return JSON.stringify(value) === JSON.stringify(this._allProbes)
+ }
}