Add Strix camera discovery system with comprehensive database
This commit adds the complete Strix IP camera stream discovery system:

- Go-based API server with SSE support for real-time updates
- 3,600+ camera brand database with stream URL patterns
- Intelligent fuzzy search across camera models
- ONVIF discovery and stream validation
- RESTful API with health check, camera search, and stream discovery
- Makefile for building and deployment
- Comprehensive README documentation

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,317 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/strix-project/strix/internal/models"
|
||||
)
|
||||
|
||||
// Loader handles efficient loading of camera database
//
// Brands are stored as individual JSON documents under brandsPath
// (one "<brandID>.json" per brand), so the full database never has to
// be resident in memory at once. Loaded brands, the popular stream
// patterns, and the query-parameter list are cached after first use;
// mu guards all three caches.
type Loader struct {
	brandsPath     string // directory containing per-brand JSON files
	patternsPath   string // file holding popular stream patterns
	parametersPath string // file holding supported query parameters

	brandsCache   map[string]*models.Camera // brandID -> decoded brand data
	patternsCache []models.StreamPattern    // nil until first LoadPopularPatterns
	paramsCache   []string                  // nil until first LoadQueryParameters

	mu sync.RWMutex // protects the three caches above

	// Minimal structural logger; satisfied by the project's logger type.
	logger interface{ Debug(string, ...any); Error(string, error, ...any) }
}
|
||||
|
||||
// NewLoader creates a new database loader
|
||||
func NewLoader(brandsPath, patternsPath, parametersPath string, logger interface{ Debug(string, ...any); Error(string, error, ...any) }) *Loader {
|
||||
return &Loader{
|
||||
brandsPath: brandsPath,
|
||||
patternsPath: patternsPath,
|
||||
parametersPath: parametersPath,
|
||||
brandsCache: make(map[string]*models.Camera),
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
// LoadBrand loads a specific brand's camera data
|
||||
func (l *Loader) LoadBrand(brandID string) (*models.Camera, error) {
|
||||
l.mu.RLock()
|
||||
if cached, ok := l.brandsCache[brandID]; ok {
|
||||
l.mu.RUnlock()
|
||||
return cached, nil
|
||||
}
|
||||
l.mu.RUnlock()
|
||||
|
||||
// Load from file
|
||||
filePath := filepath.Join(l.brandsPath, brandID+".json")
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return nil, fmt.Errorf("brand %s not found", brandID)
|
||||
}
|
||||
return nil, fmt.Errorf("failed to open brand file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
var camera models.Camera
|
||||
decoder := json.NewDecoder(file)
|
||||
if err := decoder.Decode(&camera); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode brand data: %w", err)
|
||||
}
|
||||
|
||||
// Cache the result
|
||||
l.mu.Lock()
|
||||
l.brandsCache[brandID] = &camera
|
||||
l.mu.Unlock()
|
||||
|
||||
return &camera, nil
|
||||
}
|
||||
|
||||
// ListBrands returns all available brand IDs
|
||||
func (l *Loader) ListBrands() ([]string, error) {
|
||||
files, err := os.ReadDir(l.brandsPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read brands directory: %w", err)
|
||||
}
|
||||
|
||||
var brands []string
|
||||
for _, file := range files {
|
||||
if !file.IsDir() && strings.HasSuffix(file.Name(), ".json") {
|
||||
brandID := strings.TrimSuffix(file.Name(), ".json")
|
||||
brands = append(brands, brandID)
|
||||
}
|
||||
}
|
||||
|
||||
return brands, nil
|
||||
}
|
||||
|
||||
// LoadPopularPatterns loads popular stream patterns
|
||||
func (l *Loader) LoadPopularPatterns() ([]models.StreamPattern, error) {
|
||||
l.mu.RLock()
|
||||
if l.patternsCache != nil {
|
||||
patterns := l.patternsCache
|
||||
l.mu.RUnlock()
|
||||
return patterns, nil
|
||||
}
|
||||
l.mu.RUnlock()
|
||||
|
||||
file, err := os.Open(l.patternsPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open patterns file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
var patterns []models.StreamPattern
|
||||
decoder := json.NewDecoder(file)
|
||||
if err := decoder.Decode(&patterns); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode patterns: %w", err)
|
||||
}
|
||||
|
||||
l.mu.Lock()
|
||||
l.patternsCache = patterns
|
||||
l.mu.Unlock()
|
||||
|
||||
return patterns, nil
|
||||
}
|
||||
|
||||
// LoadQueryParameters loads supported query parameters
|
||||
func (l *Loader) LoadQueryParameters() ([]string, error) {
|
||||
l.mu.RLock()
|
||||
if l.paramsCache != nil {
|
||||
params := l.paramsCache
|
||||
l.mu.RUnlock()
|
||||
return params, nil
|
||||
}
|
||||
l.mu.RUnlock()
|
||||
|
||||
file, err := os.Open(l.parametersPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open parameters file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
var params []string
|
||||
decoder := json.NewDecoder(file)
|
||||
if err := decoder.Decode(¶ms); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode parameters: %w", err)
|
||||
}
|
||||
|
||||
l.mu.Lock()
|
||||
l.paramsCache = params
|
||||
l.mu.Unlock()
|
||||
|
||||
return params, nil
|
||||
}
|
||||
|
||||
// StreamingSearch performs memory-efficient search across all brands
|
||||
func (l *Loader) StreamingSearch(searchFunc func(*models.Camera) bool) ([]*models.Camera, error) {
|
||||
files, err := os.ReadDir(l.brandsPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read brands directory: %w", err)
|
||||
}
|
||||
|
||||
var results []*models.Camera
|
||||
for _, file := range files {
|
||||
if file.IsDir() || !strings.HasSuffix(file.Name(), ".json") {
|
||||
continue
|
||||
}
|
||||
|
||||
filePath := filepath.Join(l.brandsPath, file.Name())
|
||||
camera, err := l.loadCameraFromFile(filePath)
|
||||
if err != nil {
|
||||
l.logger.Error("failed to load camera file", err, "file", file.Name())
|
||||
continue
|
||||
}
|
||||
|
||||
if searchFunc(camera) {
|
||||
results = append(results, camera)
|
||||
}
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// loadCameraFromFile loads a camera from a file without caching
|
||||
func (l *Loader) loadCameraFromFile(filePath string) (*models.Camera, error) {
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
var camera models.Camera
|
||||
decoder := json.NewDecoder(file)
|
||||
if err := decoder.Decode(&camera); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &camera, nil
|
||||
}
|
||||
|
||||
// GetEntriesForModels returns all entries for specific models with similarity threshold
|
||||
func (l *Loader) GetEntriesForModels(modelNames []string, similarityThreshold float64) ([]models.CameraEntry, error) {
|
||||
entriesMap := make(map[string]models.CameraEntry)
|
||||
|
||||
for _, modelName := range modelNames {
|
||||
// Search for similar models across all brands
|
||||
cameras, err := l.StreamingSearch(func(camera *models.Camera) bool {
|
||||
for _, entry := range camera.Entries {
|
||||
for _, model := range entry.Models {
|
||||
similarity := calculateSimilarity(modelName, model)
|
||||
if similarity >= similarityThreshold {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Collect unique entries
|
||||
for _, camera := range cameras {
|
||||
for _, entry := range camera.Entries {
|
||||
for _, model := range entry.Models {
|
||||
similarity := calculateSimilarity(modelName, model)
|
||||
if similarity >= similarityThreshold {
|
||||
// Create unique key for deduplication
|
||||
key := fmt.Sprintf("%s://%d/%s", entry.Protocol, entry.Port, entry.URL)
|
||||
entriesMap[key] = entry
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Convert map to slice
|
||||
var entries []models.CameraEntry
|
||||
for _, entry := range entriesMap {
|
||||
entries = append(entries, entry)
|
||||
}
|
||||
|
||||
return entries, nil
|
||||
}
|
||||
|
||||
// calculateSimilarity calculates similarity between two strings (0.0 to 1.0)
|
||||
func calculateSimilarity(s1, s2 string) float64 {
|
||||
s1 = strings.ToLower(s1)
|
||||
s2 = strings.ToLower(s2)
|
||||
|
||||
if s1 == s2 {
|
||||
return 1.0
|
||||
}
|
||||
|
||||
// Simple Levenshtein-based similarity
|
||||
maxLen := max(len(s1), len(s2))
|
||||
if maxLen == 0 {
|
||||
return 1.0
|
||||
}
|
||||
|
||||
distance := levenshteinDistance(s1, s2)
|
||||
return 1.0 - float64(distance)/float64(maxLen)
|
||||
}
|
||||
|
||||
// levenshteinDistance calculates the Levenshtein distance between two strings
//
// The full (len(s1)+1) x (len(s2)+1) DP matrix is reduced to two
// rolling rows, cutting memory from O(n*m) to O(m) while producing
// identical results. Distances are computed byte-wise, matching the
// original implementation (presumably fine for ASCII model names;
// multi-byte runes would count per byte — confirm if that matters).
func levenshteinDistance(s1, s2 string) int {
	if len(s1) == 0 {
		return len(s2)
	}
	if len(s2) == 0 {
		return len(s1)
	}

	prev := make([]int, len(s2)+1)
	curr := make([]int, len(s2)+1)
	for j := range prev {
		prev[j] = j // distance from the empty prefix of s1
	}

	for i := 1; i <= len(s1); i++ {
		curr[0] = i
		for j := 1; j <= len(s2); j++ {
			cost := 0
			if s1[i-1] != s2[j-1] {
				cost = 1
			}
			curr[j] = min(
				prev[j]+1,      // deletion
				curr[j-1]+1,    // insertion
				prev[j-1]+cost, // substitution
			)
		}
		prev, curr = curr, prev
	}

	return prev[len(s2)]
}
|
||||
|
||||
// min returns the smallest of the supplied values.
// At least one value must be provided; calling with none panics.
func min(values ...int) int {
	result := values[0]
	for i := 1; i < len(values); i++ {
		if values[i] < result {
			result = values[i]
		}
	}
	return result
}
|
||||
|
||||
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
|
||||
|
||||
// ClearCache clears the internal caches
|
||||
func (l *Loader) ClearCache() {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
|
||||
l.brandsCache = make(map[string]*models.Camera)
|
||||
l.patternsCache = nil
|
||||
l.paramsCache = nil
|
||||
}
|
||||
@@ -0,0 +1,359 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/lithammer/fuzzysearch/fuzzy"
|
||||
"github.com/strix-project/strix/internal/models"
|
||||
)
|
||||
|
||||
// SearchEngine handles intelligent camera searching
//
// Queries are normalized and tokenized, then scored per brand (first
// token) and per model (remaining tokens) using exact, prefix,
// substring, fuzzy, and Levenshtein-similarity tiers.
type SearchEngine struct {
	loader *Loader // backing database loader (brand files, caches)
	logger interface{ Debug(string, ...any); Error(string, error, ...any) }
	// NOTE(review): mu is never used by any method visible in this
	// file — confirm whether it is needed or can be removed.
	mu sync.RWMutex
}
|
||||
|
||||
// NewSearchEngine creates a new search engine
|
||||
func NewSearchEngine(loader *Loader, logger interface{ Debug(string, ...any); Error(string, error, ...any) }) *SearchEngine {
|
||||
return &SearchEngine{
|
||||
loader: loader,
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
// SearchResult represents a single search result with score
type SearchResult struct {
	Camera *models.Camera // matched camera record
	Score  float64        // combined brand/model match score in [0, 1]
}
|
||||
|
||||
// Search performs intelligent camera search
|
||||
func (s *SearchEngine) Search(query string, limit int) (*models.CameraSearchResponse, error) {
|
||||
if limit <= 0 {
|
||||
limit = 10
|
||||
}
|
||||
|
||||
// Normalize query
|
||||
normalizedQuery := s.normalizeQuery(query)
|
||||
tokens := s.tokenizeQuery(normalizedQuery)
|
||||
|
||||
s.logger.Debug("searching cameras", "query", query, "normalized", normalizedQuery, "tokens", tokens)
|
||||
|
||||
// Extract potential brand and model
|
||||
brandToken, modelTokens := s.extractBrandModel(tokens)
|
||||
|
||||
// Perform search
|
||||
results, err := s.performSearch(brandToken, modelTokens, normalizedQuery)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("search failed: %w", err)
|
||||
}
|
||||
|
||||
// Sort by score
|
||||
sort.Slice(results, func(i, j int) bool {
|
||||
return results[i].Score > results[j].Score
|
||||
})
|
||||
|
||||
// Apply limit
|
||||
if len(results) > limit {
|
||||
results = results[:limit]
|
||||
}
|
||||
|
||||
// Convert to response
|
||||
cameras := make([]models.Camera, len(results))
|
||||
for i, result := range results {
|
||||
cameras[i] = *result.Camera
|
||||
cameras[i].MatchScore = result.Score
|
||||
}
|
||||
|
||||
return &models.CameraSearchResponse{
|
||||
Cameras: cameras,
|
||||
Total: len(results),
|
||||
Returned: len(cameras),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// normalizeQuery normalizes the search query
|
||||
func (s *SearchEngine) normalizeQuery(query string) string {
|
||||
// Convert to lowercase
|
||||
normalized := strings.ToLower(query)
|
||||
|
||||
// Remove multiple spaces
|
||||
normalized = regexp.MustCompile(`\s+`).ReplaceAllString(normalized, " ")
|
||||
|
||||
// Remove special characters but keep spaces
|
||||
normalized = regexp.MustCompile(`[^a-z0-9\s\-]`).ReplaceAllString(normalized, " ")
|
||||
|
||||
// Trim spaces
|
||||
normalized = strings.TrimSpace(normalized)
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
// tokenizeQuery splits query into tokens
|
||||
func (s *SearchEngine) tokenizeQuery(query string) []string {
|
||||
// Split by spaces and filter empty tokens
|
||||
tokens := strings.Fields(query)
|
||||
|
||||
var result []string
|
||||
for _, token := range tokens {
|
||||
if token != "" {
|
||||
result = append(result, token)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// extractBrandModel attempts to extract brand and model from tokens
|
||||
func (s *SearchEngine) extractBrandModel(tokens []string) (string, []string) {
|
||||
if len(tokens) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
// First token is likely the brand
|
||||
brandToken := tokens[0]
|
||||
|
||||
// Rest are model tokens
|
||||
var modelTokens []string
|
||||
if len(tokens) > 1 {
|
||||
modelTokens = tokens[1:]
|
||||
}
|
||||
|
||||
return brandToken, modelTokens
|
||||
}
|
||||
|
||||
// performSearch executes the actual search
//
// Every known brand is scored concurrently with at most 10 brand
// loads in flight. A brand whose name scores below 0.3 against the
// brand token is skipped before its file is decoded, which avoids
// reading most brand files for a typical query. Surviving cameras are
// scored by their best-matching model, and kept when the combined
// score reaches 0.3. Result order is nondeterministic (goroutine
// completion order); the caller is responsible for sorting.
func (s *SearchEngine) performSearch(brandToken string, modelTokens []string, fullQuery string) ([]SearchResult, error) {
	var results []SearchResult
	var mu sync.Mutex // guards results across worker goroutines
	var wg sync.WaitGroup

	// Get all brands
	brands, err := s.loader.ListBrands()
	if err != nil {
		return nil, err
	}

	// Search in parallel with limited concurrency
	sem := make(chan struct{}, 10) // Limit to 10 concurrent searches

	for _, brandID := range brands {
		wg.Add(1)
		// brandID is passed as an argument, so the closure is safe even
		// under pre-1.22 per-loop variable capture semantics.
		go func(brandID string) {
			defer wg.Done()
			sem <- struct{}{}        // acquire a concurrency slot
			defer func() { <-sem }() // release the slot on any exit path

			// Calculate brand match score
			brandScore := s.calculateBrandScore(brandID, brandToken)

			// Skip if brand score is too low (cheap check before disk I/O)
			if brandScore < 0.3 {
				return
			}

			// Load brand data (served from the loader's cache after first load)
			camera, err := s.loader.LoadBrand(brandID)
			if err != nil {
				s.logger.Error("failed to load brand", err, "brand", brandID)
				return
			}

			// Calculate model scores for entries; keep only the best
			maxModelScore := 0.0
			for _, entry := range camera.Entries {
				for _, model := range entry.Models {
					modelScore := s.calculateModelScore(model, modelTokens, fullQuery)
					if modelScore > maxModelScore {
						maxModelScore = modelScore
					}
				}
			}

			// Calculate final score
			finalScore := s.calculateFinalScore(brandScore, maxModelScore)

			// Add to results if score is high enough
			if finalScore >= 0.3 {
				mu.Lock()
				results = append(results, SearchResult{
					Camera: camera,
					Score:  finalScore,
				})
				mu.Unlock()
			}
		}(brandID)
	}

	wg.Wait()
	return results, nil
}
|
||||
|
||||
// calculateBrandScore calculates how well a brand matches
|
||||
func (s *SearchEngine) calculateBrandScore(brandID, brandToken string) float64 {
|
||||
brandID = strings.ToLower(brandID)
|
||||
brandToken = strings.ToLower(brandToken)
|
||||
|
||||
// Exact match
|
||||
if brandID == brandToken {
|
||||
return 1.0
|
||||
}
|
||||
|
||||
// Remove hyphens for comparison
|
||||
brandIDClean := strings.ReplaceAll(brandID, "-", "")
|
||||
brandTokenClean := strings.ReplaceAll(brandToken, "-", "")
|
||||
|
||||
if brandIDClean == brandTokenClean {
|
||||
return 0.95
|
||||
}
|
||||
|
||||
// Check if brand starts with token
|
||||
if strings.HasPrefix(brandID, brandToken) || strings.HasPrefix(brandIDClean, brandTokenClean) {
|
||||
return 0.85
|
||||
}
|
||||
|
||||
// Check if token is contained in brand
|
||||
if strings.Contains(brandID, brandToken) || strings.Contains(brandIDClean, brandTokenClean) {
|
||||
return 0.75
|
||||
}
|
||||
|
||||
// Fuzzy match
|
||||
if fuzzy.Match(brandToken, brandID) {
|
||||
return 0.6
|
||||
}
|
||||
|
||||
// Calculate similarity
|
||||
similarity := calculateSimilarity(brandID, brandToken)
|
||||
return similarity * 0.5
|
||||
}
|
||||
|
||||
// calculateModelScore calculates how well a model matches
//
// Returns 1.0 on an exact (case-insensitive) match against the full
// query. When every model token is contained in the normalized model
// name, the score lands in the 0.8+ band; failing that, a fuzzy
// subsequence hit on the concatenated tokens gives 0.6, and otherwise
// the score is half the Levenshtein-based similarity.
func (s *SearchEngine) calculateModelScore(model string, modelTokens []string, fullQuery string) float64 {
	model = strings.ToLower(model)
	fullQuery = strings.ToLower(fullQuery)

	// Check if full query matches the model
	if model == fullQuery {
		return 1.0
	}

	// Check if model contains all tokens
	modelNormalized := s.normalizeQuery(model)
	allTokensFound := true
	tokenMatchScore := 0.0

	for _, token := range modelTokens {
		if strings.Contains(modelNormalized, token) {
			tokenMatchScore += 0.2
		} else {
			allTokensFound = false
		}
	}

	if allTokensFound && len(modelTokens) > 0 {
		// NOTE(review): when all tokens match, tokenMatchScore equals
		// 0.2*len(modelTokens), so this expression always evaluates to
		// exactly 0.84 regardless of token count — confirm whether a
		// token-count-dependent bonus was intended here.
		return 0.8 + tokenMatchScore/float64(len(modelTokens))*0.2
	}

	// Fuzzy match on full model (tokens joined with no separator)
	modelCombined := strings.Join(modelTokens, "")
	if fuzzy.Match(modelCombined, modelNormalized) {
		return 0.6
	}

	// Calculate similarity against the space-joined tokens
	similarity := calculateSimilarity(modelNormalized, strings.Join(modelTokens, " "))
	return similarity * 0.5
}
|
||||
|
||||
// calculateFinalScore combines brand and model scores
|
||||
func (s *SearchEngine) calculateFinalScore(brandScore, modelScore float64) float64 {
|
||||
// If we have both brand and model matches
|
||||
if brandScore > 0 && modelScore > 0 {
|
||||
// Weighted average: brand 30%, model 70%
|
||||
return brandScore*0.3 + modelScore*0.7
|
||||
}
|
||||
|
||||
// If only brand matches
|
||||
if brandScore > 0 {
|
||||
return brandScore * 0.5
|
||||
}
|
||||
|
||||
// If only model matches
|
||||
return modelScore * 0.5
|
||||
}
|
||||
|
||||
// SearchByModel searches for cameras by model name with fuzzy matching
|
||||
func (s *SearchEngine) SearchByModel(modelName string, similarityThreshold float64, limit int) ([]models.Camera, error) {
|
||||
if similarityThreshold <= 0 {
|
||||
similarityThreshold = 0.8
|
||||
}
|
||||
if limit <= 0 {
|
||||
limit = 6
|
||||
}
|
||||
|
||||
normalizedModel := s.normalizeQuery(modelName)
|
||||
var results []SearchResult
|
||||
|
||||
// Search through all brands
|
||||
cameras, err := s.loader.StreamingSearch(func(camera *models.Camera) bool {
|
||||
maxScore := 0.0
|
||||
for _, entry := range camera.Entries {
|
||||
for _, model := range entry.Models {
|
||||
normalizedEntryModel := s.normalizeQuery(model)
|
||||
similarity := calculateSimilarity(normalizedModel, normalizedEntryModel)
|
||||
|
||||
// Also check fuzzy match
|
||||
if fuzzy.Match(normalizedModel, normalizedEntryModel) {
|
||||
if similarity < 0.7 {
|
||||
similarity = 0.7
|
||||
}
|
||||
}
|
||||
|
||||
if similarity > maxScore {
|
||||
maxScore = similarity
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if maxScore >= similarityThreshold {
|
||||
camera.MatchScore = maxScore
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Convert to SearchResult for sorting
|
||||
for _, camera := range cameras {
|
||||
results = append(results, SearchResult{
|
||||
Camera: camera,
|
||||
Score: camera.MatchScore,
|
||||
})
|
||||
}
|
||||
|
||||
// Sort by score
|
||||
sort.Slice(results, func(i, j int) bool {
|
||||
return results[i].Score > results[j].Score
|
||||
})
|
||||
|
||||
// Apply limit
|
||||
if len(results) > limit {
|
||||
results = results[:limit]
|
||||
}
|
||||
|
||||
// Convert back to Camera slice
|
||||
var finalCameras []models.Camera
|
||||
for _, result := range results {
|
||||
finalCameras = append(finalCameras, *result.Camera)
|
||||
}
|
||||
|
||||
return finalCameras, nil
|
||||
}
|
||||
Reference in New Issue
Block a user