package recconf

import (
	"encoding/json"
	"reflect"

	"gitlab.alibaba-inc.com/pai_biz_arch/pairec/config"
)

var (
	Config      *RecommendConfig
	adapterName = "json"
)

const (
	DaoConf_Adapter_Mysql      = "mysql"
	DaoConf_Adapter_Redis      = "redis"
	DaoConf_Adapter_TableStore = "tablestore"
	DaoConf_Adapter_HBase      = "hbase"
	DaoConf_Adapter_Hologres   = "hologres"

	DataSource_Type_Kafka   = "kafka"
	DataSource_Type_Datahub = "datahub"
)

func init() {
	Config = newRecommendConfig()
}

type RecommendConfig struct {
	RunMode            string // Run Mode: daily | product
	Region             string
	ListenConf         ListenConfig
	FeatureConfs       map[string]SceneFeatureConfig
	SortNames          map[string][]string
	FilterNames        map[string][]string
	AlgoConfs          []AlgoConfig
	RecallConfs        []RecallConfig
	FilterConfs        []FilterConfig
	RedisConfs         map[string]RedisConfig
	MysqlConfs         map[string]MysqlConfig
	HologresConfs      map[string]HologresConfig
	KafkaConfs         map[string]KafkaConfig
	DatahubConfs       map[string]DatahubConfig
	HBaseConfs         map[string]HBaseConfig
	TableStoreConfs    map[string]TableStoreConfig
	SceneConfs         map[string]map[string]CategoryConfig
	RankConf           map[string]RankConfig
	LogConf            LogConfig
	ABTestConf         ABTestConfig
	CallBackConfs      map[string]CallBackConfig
	GeneralRankConfs   map[string]GeneralRankConfig
	ColdStartRankConfs map[string]ColdStartRankConfig
	DPPConf            []DPPSortConfig
}

type ListenConfig struct {
	HttpAddr string
	HttpPort int
}

type DaoConfig struct {
	Adapter             string
	AdapterType         string
	RedisName           string
	RedisPrefix         string
	RedisDefaultKey     string
	MysqlName           string
	MysqlTable          string
	Config              string
	TableStoreName      string
	TableStoreTableName string
	HBasePrefix         string
	HBaseName           string
	HBaseTable          string
	ColumnFamily        string
	Qualifier           string
	// hologres
	HologresName      string
	HologresTableName string
}

type SceneFeatureConfig struct {
	FeatureLoadConfs []FeatureLoadConfig
	AsynLoadFeature  bool
}

type FeatureLoadConfig struct {
	FeatureDaoConf FeatureDaoConfig
	Features       []FeatureConfig
}

type FeatureDaoConfig struct {
	DaoConfig
	FeatureKey         string
	FeatureStore       string // user or item
	UserFeatureKeyName string
	ItemFeatureKeyName string
	UserSelectFields   string
	ItemSelectFields   string
	FeatureType        string // each feature type is built in a different way
	SequenceLength     int
	SequenceName       string
	SequenceEvent      string
	SequenceDelim      string
	// SequencePlayTime filters events by a minimum play time.
	// For example, to keep only play events longer than 10s, set the value to "play:10000" (the time unit is ms).
	// To filter on more than one event, separate the entries with ';', e.g. "play:10000;read:50000".
	SequencePlayTime         string
	SequenceOfflineTableName string
	// SequenceDimFields fetches additional dimension fields from the db
	SequenceDimFields string
}

type FeatureConfig struct {
	FeatureType         string
	FeatureName         string
	FeatureSource       string
	FeatureStore        string // user or item
	RemoveFeatureSource bool   // delete the feature source
	Normalizer          string
}

type AlgoConfig struct {
	Name       string
	Type       string
	EasConf    EasConfig
	VectorConf VectorConfig
	LookupConf LookupConfig
	SeldonConf SeldonConfig
}

type LookupConfig struct {
	FieldName string
}

type EasConfig struct {
	Processor        string
	Url              string
	Auth             string
	SignatureName    string
	Timeout          int
	RetryTimes       int
	ResponseFuncName string
	Outputs          []string
}

type SeldonConfig struct {
	Url              string
	ResponseFuncName string
}

type VectorConfig struct {
	ServerAddress string
	Timeout       int64
}

type RecallConfig struct {
	Name                     string
	RecallType               string
	RecallCount              int
	RecallAlgo               string
	ItemType                 string
	CacheAdapter             string
	CacheConfig              string
	CachePrefix              string
	CacheTime                int // cache time in seconds
	Triggers                 []TriggerConfig
	HologresVectorConf       HologresVectorConfig
	UserCollaborativeDaoConf UserCollaborativeDaoConfig
	ItemCollaborativeDaoConf ItemCollaborativeDaoConfig
	User2ItemDaoConf         User2ItemDaoConfig
	UserTopicDaoConf         UserTopicDaoConfig
	DaoConf                  DaoConfig
	VectorDaoConf            VectorDaoConfig
	ColdStartDaoConf         ColdStartDaoConfig
}
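// Illustrative only: since no json struct tags are declared, encoding/json matches
// incoming JSON keys to the exported field names above (case-insensitively).
// A RecallConfs entry in the JSON config file could therefore look like the
// following; the names and values are hypothetical:
//
//	{
//	    "RecallConfs": [
//	        {
//	            "Name": "u2i_recall",
//	            "RecallType": "UserCollaborativeFilterRecall",
//	            "RecallCount": 200,
//	            "RecallAlgo": "u2i_algo",
//	            "CacheTime": 1800
//	        }
//	    ]
//	}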
type ColdStartDaoConfig struct {
	SqlDaoConfig
	TimeInterval int // seconds
}

type SqlDaoConfig struct {
	DaoConfig
	WhereClause  string
	PrimaryKey   string
	SelectFields string
}

type HologresVectorConfig struct {
	VectorTable          string // example: "item_emb_{partition}"; '{partition}' will be replaced by the partition info
	VectorKeyField       string
	VectorEmbeddingField string
}

type UserCollaborativeDaoConfig struct {
	DaoConfig
	User2ItemTable string
	Item2ItemTable string
}

type ItemCollaborativeDaoConfig struct {
	DaoConfig
	Item2ItemTable string
}

type User2ItemDaoConfig struct {
	DaoConfig
	User2ItemTable string
	Item2ItemTable string
}

type UserTopicDaoConfig struct {
	DaoConfig
	UserTopicTable string
	TopicItemTable string
}

type VectorDaoConfig struct {
	DaoConfig
	EmbeddingField string
	KeyField       string
	// Set the following fields to fetch the partition info.
	// If they are not set, '{partition}' in the table name (if present) won't be replaced.
	PartitionInfoTable string
	PartitionInfoField string
}

type RedisConfig struct {
	Host           string
	Port           int
	Password       string
	DbNum          int
	MaxIdle        int
	ConnectTimeout int
	ReadTimeout    int
	WriteTimeout   int
}

type MysqlConfig struct {
	DSN string
}

type HologresConfig struct {
	DSN string
}

type KafkaConfig struct {
	BootstrapServers string
	Topic            string
}

type DatahubConfig struct {
	AccessId    string
	AccessKey   string
	Endpoint    string
	ProjectName string
	TopicName   string
}

type HBaseConfig struct {
	ZKQuorum string
}

type TableStoreConfig struct {
	EndPoint        string
	InstanceName    string
	AccessKeyId     string
	AccessKeySecret string
	RoleArn         string
}

type SceneConfig struct {
	Categories []string
}

type CategoryConfig struct {
	RecallNames []string
	AlwaysRank  bool // rank even when the recall result size is not enough
}

type RankConfig struct {
	RankAlgoList []string
	RankScore    string
	Processor    string
	BatchCount   int
}

type OperatorValueConfig struct {
	Type string // "property" or "function"
	Name string
	From string // item or user
}

type LogConfig struct {
	RetensionDays int
	DiskSize      int    // unit: G; if the value is 20, the actual size is 20G
	LogLevel      string // valid values are DEBUG, INFO, ERROR, FATAL
	Output        string // valid values are file, console
}
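// Illustrative only: a LogConf section in the JSON config file might look like the
// following (values are hypothetical; LogLevel must be one of DEBUG, INFO, ERROR,
// FATAL and Output must be file or console):
//
//	{
//	    "LogConf": {
//	        "RetensionDays": 7,
//	        "DiskSize": 20,
//	        "LogLevel": "INFO",
//	        "Output": "console"
//	    }
//	}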
type ABTestConfig struct {
	Host  string
	Token string
}

type FilterConfig struct {
	Name                      string
	FilterType                string
	DaoConf                   DaoConfig
	MaxItems                  int
	TimeInterval              int // seconds
	RetainNum                 int
	WriteLog                  bool
	WriteLogExcludeScenes     []string
	GenerateItemDataFuncName  string
	AdjustCountConfs          []AdjustCountConfig
	ItemStateDaoConf          ItemStateDaoConfig
	FilterEvaluableExpression string
	FilterParams              []FilterParamConfig
}

type FilterParamConfig struct {
	Name     string
	Operator string
	Type     string // string, int, int64
	Value    interface{}
}

type ItemStateDaoConfig struct {
	DaoConfig
	ItemFieldName string
	WhereClause   string
	SelectFields  string
}

type AdjustCountConfig struct {
	RecallName string
	Count      int
	Type       string
}

type CallBackConfig struct {
	DataSource DataSourceConfig
}

type GeneralRankConfig struct {
	FeatureLoadConfs []FeatureLoadConfig
	RankConf         RankConfig
	RetainNum        int
	BatchCount       int
}

type ColdStartRankConfig struct {
	RecallName string
	AlgoName   string
}

type DataSourceConfig struct {
	Name string
	Type string
}

type TriggerConfig struct {
	TriggerKey   string
	DefaultValue string
	Boundaries   []int
}

type DPPSortConfig struct {
	Name               string
	DaoConf            DaoConfig
	TableName          string
	TableSuffixParam   string
	TablePKey          string
	EmbeddingColumn    string
	EmbeddingSeparator string
	Alpha              float64
	CacheTimeInMinutes int
	EmbeddingHookNames []string
	NormalizeEmb       string
	WindowSize         int
	EmbMissedThreshold float64
	FilterRetrieveIds  []string
	EnsurePositiveSim  string
}

func newRecommendConfig() *RecommendConfig {
	conf := RecommendConfig{
		RunMode: "daily",
		ListenConf: ListenConfig{
			HttpAddr: "",
			HttpPort: 80,
		},
		// SortNames: []string{"item_score"},
	}

	return &conf
}

// CopyConfig copies the exported fields of src into dst.
// A field is copied only if every filter returns true for its name.
func CopyConfig(src, dst *RecommendConfig, filters ...func(string) bool) {
	srcVal := reflect.ValueOf(src).Elem()
	srcType := reflect.TypeOf(src).Elem()
	dstVal := reflect.ValueOf(dst).Elem()

	numOfFields := srcVal.NumField()
	for i := 0; i < numOfFields; i++ {
		fieldType := srcType.Field(i)

		flag := true
		for _, filter := range filters {
			flag = filter(fieldType.Name)
			if !flag {
				break
			}
		}
		if !flag {
			continue
		}

		elemField := dstVal.FieldByName(fieldType.Name)
		if elemField.CanSet() {
			fieldVal := srcVal.Field(i)
			elemField.Set(fieldVal)
		}
	}
}

// Load reads the config file at filePath and unmarshals it into the global Config.
//
// parameter:
//
//	filePath: config file path
func Load(filePath string) error {
	configer, err := config.NewConfig(adapterName, filePath)
	if err != nil {
		return err
	}

	rawdata := configer.RawData()
	err = json.Unmarshal(rawdata, Config)
	if err != nil {
		return err
	}

	return nil
}

/**
func loadConfig(filePath string) error {
	content, err := ioutil.ReadFile(filePath)
	if err != nil {
		return err
	}
	if len(content) == 0 {
		return errors.New("config content empty")
	}

	conf := RecommendConfig{}
	err = json.Unmarshal(content, &conf)
	if err != nil {
		return err
	}

	Config = &conf
	log.Info("load config success")
	return nil
}
**/
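// Illustrative usage of this package (the config file path below is hypothetical):
//
//	if err := recconf.Load("conf/recommend.json"); err != nil {
//	    panic(err)
//	}
//	port := recconf.Config.ListenConf.HttpPort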