config: remove global option structs

master
Oliver Tonnhofer 2018-06-07 20:23:06 +02:00
parent 8c526b0543
commit 6054ef6a34
6 changed files with 201 additions and 187 deletions
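
The commit drops the package-level BaseOptions/ImportOptions globals and the shared ImportFlags/DiffFlags/RunFlags flag sets: each Parse* function now builds its own flag.FlagSet, returns the parsed options, and the subcommands (import_.Import, update.Diff, update.Run) take those options as arguments. Below is a minimal sketch of the resulting call flow for the import subcommand, condensed from the main.go hunk that follows; the standalone main wrapper and the import paths are assumptions, not part of the diff.

package main

import (
	"os"

	"github.com/omniscale/imposm3/config" // paths assumed from the repository layout
	"github.com/omniscale/imposm3/import_"
	"github.com/omniscale/imposm3/stats"
)

func main() {
	// In imposm3 this runs inside cmd.Main after dispatching on os.Args[1];
	// shown here as a standalone program for illustration.
	opts := config.ParseImport(os.Args[2:]) // parses flags, applies -config, validates
	if opts.Base.Httpprofile != "" {
		stats.StartHttpPProf(opts.Base.Httpprofile)
	}
	import_.Import(opts)
}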


@ -46,25 +46,25 @@ func Main(usage func()) {
switch os.Args[1] {
case "import":
config.ParseImport(os.Args[2:])
if config.BaseOptions.Httpprofile != "" {
stats.StartHttpPProf(config.BaseOptions.Httpprofile)
opts := config.ParseImport(os.Args[2:])
if opts.Base.Httpprofile != "" {
stats.StartHttpPProf(opts.Base.Httpprofile)
}
import_.Import()
import_.Import(opts)
case "diff":
config.ParseDiffImport(os.Args[2:])
opts, files := config.ParseDiffImport(os.Args[2:])
if config.BaseOptions.Httpprofile != "" {
stats.StartHttpPProf(config.BaseOptions.Httpprofile)
if opts.Httpprofile != "" {
stats.StartHttpPProf(opts.Httpprofile)
}
update.Diff()
update.Diff(opts, files)
case "run":
config.ParseRunImport(os.Args[2:])
opts := config.ParseRunImport(os.Args[2:])
if config.BaseOptions.Httpprofile != "" {
stats.StartHttpPProf(config.BaseOptions.Httpprofile)
if opts.Httpprofile != "" {
stats.StartHttpPProf(opts.Httpprofile)
}
update.Run()
update.Run(opts)
case "query-cache":
query.Query(os.Args[2:])
case "version":


@ -38,11 +38,7 @@ const defaultSchemaImport = "import"
const defaultSchemaProduction = "public"
const defaultSchemaBackup = "backup"
var ImportFlags = flag.NewFlagSet("import", flag.ExitOnError)
var DiffFlags = flag.NewFlagSet("diff", flag.ExitOnError)
var RunFlags = flag.NewFlagSet("run", flag.ExitOnError)
type _BaseOptions struct {
type Base struct {
Connection string
CacheDir string
DiffDir string
@ -61,7 +57,7 @@ type _BaseOptions struct {
DiffStateBefore time.Duration
}
func (o *_BaseOptions) updateFromConfig() error {
func (o *Base) updateFromConfig() error {
conf := &Config{
CacheDir: defaultCacheDir,
Srid: defaultSrid,
@ -149,7 +145,7 @@ func (o *_BaseOptions) updateFromConfig() error {
return nil
}
func (o *_BaseOptions) check() []error {
func (o *Base) check() []error {
errs := []error{}
if o.Srid != 3857 && o.Srid != 4326 {
errs = append(errs, errors.New("only -srid=3857 or -srid=4326 are supported"))
@ -160,7 +156,8 @@ func (o *_BaseOptions) check() []error {
return errs
}
type _ImportOptions struct {
type Import struct {
Base Base
Overwritecache bool
Appendcache bool
Read string
@ -172,129 +169,137 @@ type _ImportOptions struct {
RemoveBackup bool
}
var BaseOptions = _BaseOptions{}
var ImportOptions = _ImportOptions{}
func addBaseFlags(flags *flag.FlagSet) {
flags.StringVar(&BaseOptions.Connection, "connection", "", "connection parameters")
flags.StringVar(&BaseOptions.CacheDir, "cachedir", defaultCacheDir, "cache directory")
flags.StringVar(&BaseOptions.DiffDir, "diffdir", "", "diff directory for last.state.txt")
flags.StringVar(&BaseOptions.MappingFile, "mapping", "", "mapping file")
flags.IntVar(&BaseOptions.Srid, "srid", defaultSrid, "srs id")
flags.StringVar(&BaseOptions.LimitTo, "limitto", "", "limit to geometries")
flags.Float64Var(&BaseOptions.LimitToCacheBuffer, "limittocachebuffer", 0.0, "limit to buffer for cache")
flags.StringVar(&BaseOptions.ConfigFile, "config", "", "config (json)")
flags.StringVar(&BaseOptions.Httpprofile, "httpprofile", "", "bind address for profile server")
flags.BoolVar(&BaseOptions.Quiet, "quiet", false, "quiet log output")
flags.StringVar(&BaseOptions.Schemas.Import, "dbschema-import", defaultSchemaImport, "db schema for imports")
flags.StringVar(&BaseOptions.Schemas.Production, "dbschema-production", defaultSchemaProduction, "db schema for production")
flags.StringVar(&BaseOptions.Schemas.Backup, "dbschema-backup", defaultSchemaBackup, "db schema for backups")
func addBaseFlags(opts *Base, flags *flag.FlagSet) {
flags.StringVar(&opts.Connection, "connection", "", "connection parameters")
flags.StringVar(&opts.CacheDir, "cachedir", defaultCacheDir, "cache directory")
flags.StringVar(&opts.DiffDir, "diffdir", "", "diff directory for last.state.txt")
flags.StringVar(&opts.MappingFile, "mapping", "", "mapping file")
flags.IntVar(&opts.Srid, "srid", defaultSrid, "srs id")
flags.StringVar(&opts.LimitTo, "limitto", "", "limit to geometries")
flags.Float64Var(&opts.LimitToCacheBuffer, "limittocachebuffer", 0.0, "limit to buffer for cache")
flags.StringVar(&opts.ConfigFile, "config", "", "config (json)")
flags.StringVar(&opts.Httpprofile, "httpprofile", "", "bind address for profile server")
flags.BoolVar(&opts.Quiet, "quiet", false, "quiet log output")
flags.StringVar(&opts.Schemas.Import, "dbschema-import", defaultSchemaImport, "db schema for imports")
flags.StringVar(&opts.Schemas.Production, "dbschema-production", defaultSchemaProduction, "db schema for production")
flags.StringVar(&opts.Schemas.Backup, "dbschema-backup", defaultSchemaBackup, "db schema for backups")
}
func UsageImport() {
fmt.Fprintf(os.Stderr, "Usage: %s %s [args]\n\n", os.Args[0], os.Args[1])
ImportFlags.PrintDefaults()
os.Exit(2)
}
func ParseImport(args []string) Import {
flags := flag.NewFlagSet("import", flag.ExitOnError)
opts := Import{}
func UsageDiff() {
fmt.Fprintf(os.Stderr, "Usage: %s %s [args] [.osc.gz, ...]\n\n", os.Args[0], os.Args[1])
DiffFlags.PrintDefaults()
os.Exit(2)
}
addBaseFlags(&opts.Base, flags)
flags.BoolVar(&opts.Overwritecache, "overwritecache", false, "overwritecache")
flags.BoolVar(&opts.Appendcache, "appendcache", false, "append cache")
flags.StringVar(&opts.Read, "read", "", "read")
flags.BoolVar(&opts.Write, "write", false, "write")
flags.BoolVar(&opts.Optimize, "optimize", false, "optimize")
flags.BoolVar(&opts.Diff, "diff", false, "enable diff support")
flags.BoolVar(&opts.DeployProduction, "deployproduction", false, "deploy production")
flags.BoolVar(&opts.RevertDeploy, "revertdeploy", false, "revert deploy to production")
flags.BoolVar(&opts.RemoveBackup, "removebackup", false, "remove backups from deploy")
flags.DurationVar(&opts.Base.DiffStateBefore, "diff-state-before", 0, "set initial diff sequence before")
func UsageRun() {
fmt.Fprintf(os.Stderr, "Usage: %s %s [args] [.osc.gz, ...]\n\n", os.Args[0], os.Args[1])
DiffFlags.PrintDefaults()
os.Exit(2)
}
flags.Usage = func() {
fmt.Fprintf(os.Stderr, "Usage: %s %s [args]\n\n", os.Args[0], os.Args[1])
flags.PrintDefaults()
os.Exit(2)
}
func init() {
ImportFlags.Usage = UsageImport
DiffFlags.Usage = UsageDiff
RunFlags.Usage = UsageRun
addBaseFlags(DiffFlags)
addBaseFlags(ImportFlags)
addBaseFlags(RunFlags)
ImportFlags.BoolVar(&ImportOptions.Overwritecache, "overwritecache", false, "overwritecache")
ImportFlags.BoolVar(&ImportOptions.Appendcache, "appendcache", false, "append cache")
ImportFlags.StringVar(&ImportOptions.Read, "read", "", "read")
ImportFlags.BoolVar(&ImportOptions.Write, "write", false, "write")
ImportFlags.BoolVar(&ImportOptions.Optimize, "optimize", false, "optimize")
ImportFlags.BoolVar(&ImportOptions.Diff, "diff", false, "enable diff support")
ImportFlags.BoolVar(&ImportOptions.DeployProduction, "deployproduction", false, "deploy production")
ImportFlags.BoolVar(&ImportOptions.RevertDeploy, "revertdeploy", false, "revert deploy to production")
ImportFlags.BoolVar(&ImportOptions.RemoveBackup, "removebackup", false, "remove backups from deploy")
ImportFlags.DurationVar(&BaseOptions.DiffStateBefore, "diff-state-before", 0, "set initial diff sequence before")
DiffFlags.StringVar(&BaseOptions.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
DiffFlags.IntVar(&BaseOptions.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
RunFlags.StringVar(&BaseOptions.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
RunFlags.IntVar(&BaseOptions.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
RunFlags.DurationVar(&BaseOptions.ReplicationInterval, "replication-interval", time.Minute, "replication interval as duration (1m, 1h, 24h)")
}
func ParseImport(args []string) {
if len(args) == 0 {
UsageImport()
flags.Usage()
}
err := ImportFlags.Parse(args)
err := flags.Parse(args)
if err != nil {
log.Fatal(err)
}
err = BaseOptions.updateFromConfig()
err = opts.Base.updateFromConfig()
if err != nil {
log.Fatal(err)
}
errs := BaseOptions.check()
errs := opts.Base.check()
if len(errs) != 0 {
reportErrors(errs)
UsageImport()
flags.Usage()
}
return opts
}
func ParseDiffImport(args []string) {
func ParseDiffImport(args []string) (Base, []string) {
flags := flag.NewFlagSet("diff", flag.ExitOnError)
opts := Base{}
addBaseFlags(&opts, flags)
flags.StringVar(&opts.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
flags.IntVar(&opts.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
flags.Usage = func() {
fmt.Fprintf(os.Stderr, "Usage: %s %s [args] [.osc.gz, ...]\n\n", os.Args[0], os.Args[1])
flags.PrintDefaults()
os.Exit(2)
}
if len(args) == 0 {
UsageDiff()
flags.Usage()
}
err := DiffFlags.Parse(args)
err := flags.Parse(args)
if err != nil {
log.Fatal(err)
}
err = BaseOptions.updateFromConfig()
err = opts.updateFromConfig()
if err != nil {
log.Fatal(err)
}
errs := BaseOptions.check()
errs := opts.check()
if len(errs) != 0 {
reportErrors(errs)
UsageDiff()
flags.Usage()
}
return opts, flags.Args()
}
func ParseRunImport(args []string) {
func ParseRunImport(args []string) Base {
flags := flag.NewFlagSet("run", flag.ExitOnError)
opts := Base{}
addBaseFlags(&opts, flags)
flags.StringVar(&opts.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
flags.IntVar(&opts.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
flags.DurationVar(&opts.ReplicationInterval, "replication-interval", time.Minute, "replication interval as duration (1m, 1h, 24h)")
flags.Usage = func() {
fmt.Fprintf(os.Stderr, "Usage: %s %s [args] [.osc.gz, ...]\n\n", os.Args[0], os.Args[1])
flags.PrintDefaults()
os.Exit(2)
}
if len(args) == 0 {
UsageRun()
flags.Usage()
}
err := RunFlags.Parse(args)
err := flags.Parse(args)
if err != nil {
log.Fatal(err)
}
err = BaseOptions.updateFromConfig()
err = opts.updateFromConfig()
if err != nil {
log.Fatal(err)
}
errs := BaseOptions.check()
errs := opts.check()
if len(errs) != 0 {
reportErrors(errs)
UsageRun()
flags.Usage()
}
return opts
}
func reportErrors(errs []error) {
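
Because the options are no longer package globals, a config.Import value can also be built programmatically and handed straight to the importer, for example from a test or an embedding program. The following is an illustrative sketch only: the field names come from the structs above, but updateFromConfig() and check() are not run, so defaults such as cache directory, SRID, and schema names are filled in by hand and all concrete values are made up.

package main

import (
	"github.com/omniscale/imposm3/config" // paths assumed from the repository layout
	"github.com/omniscale/imposm3/import_"
)

func main() {
	// Hypothetical values; flag parsing, -config handling, and validation
	// (updateFromConfig/check) are skipped in this sketch.
	opts := config.Import{
		Base: config.Base{
			Connection:  "postgis://user:password@localhost/osm",
			CacheDir:    "/tmp/imposm3_cache",
			MappingFile: "mapping.yml",
			Srid:        3857,
		},
		Read:  "extract.osm.pbf",
		Write: true,
	}
	import_.Import(opts)
}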


@ -21,27 +21,28 @@ import (
var log = logging.NewLogger("")
func Import() {
if config.BaseOptions.Quiet {
func Import(importOpts config.Import) {
baseOpts := importOpts.Base
if baseOpts.Quiet {
logging.SetQuiet(true)
}
if (config.ImportOptions.Write || config.ImportOptions.Read != "") && (config.ImportOptions.RevertDeploy || config.ImportOptions.RemoveBackup) {
if (importOpts.Write || importOpts.Read != "") && (importOpts.RevertDeploy || importOpts.RemoveBackup) {
log.Fatal("-revertdeploy and -removebackup not compatible with -read/-write")
}
if config.ImportOptions.RevertDeploy && (config.ImportOptions.RemoveBackup || config.ImportOptions.DeployProduction) {
if importOpts.RevertDeploy && (importOpts.RemoveBackup || importOpts.DeployProduction) {
log.Fatal("-revertdeploy not compatible with -deployproduction/-removebackup")
}
var geometryLimiter *limit.Limiter
if (config.ImportOptions.Write || config.ImportOptions.Read != "") && config.BaseOptions.LimitTo != "" {
if (importOpts.Write || importOpts.Read != "") && baseOpts.LimitTo != "" {
var err error
step := log.StartStep("Reading limitto geometries")
geometryLimiter, err = limit.NewFromGeoJSON(
config.BaseOptions.LimitTo,
config.BaseOptions.LimitToCacheBuffer,
config.BaseOptions.Srid,
baseOpts.LimitTo,
baseOpts.LimitToCacheBuffer,
baseOpts.Srid,
)
if err != nil {
log.Fatal(err)
@ -49,23 +50,23 @@ func Import() {
log.StopStep(step)
}
tagmapping, err := mapping.FromFile(config.BaseOptions.MappingFile)
tagmapping, err := mapping.FromFile(baseOpts.MappingFile)
if err != nil {
log.Fatal("error in mapping file: ", err)
}
var db database.DB
if config.ImportOptions.Write || config.ImportOptions.DeployProduction || config.ImportOptions.RevertDeploy || config.ImportOptions.RemoveBackup || config.ImportOptions.Optimize {
if config.BaseOptions.Connection == "" {
if importOpts.Write || importOpts.DeployProduction || importOpts.RevertDeploy || importOpts.RemoveBackup || importOpts.Optimize {
if baseOpts.Connection == "" {
log.Fatal("missing connection option")
}
conf := database.Config{
ConnectionParams: config.BaseOptions.Connection,
Srid: config.BaseOptions.Srid,
ImportSchema: config.BaseOptions.Schemas.Import,
ProductionSchema: config.BaseOptions.Schemas.Production,
BackupSchema: config.BaseOptions.Schemas.Backup,
ConnectionParams: baseOpts.Connection,
Srid: baseOpts.Srid,
ImportSchema: baseOpts.Schemas.Import,
ProductionSchema: baseOpts.Schemas.Production,
BackupSchema: baseOpts.Schemas.Backup,
}
db, err = database.Open(conf, &tagmapping.Conf)
if err != nil {
@ -74,16 +75,16 @@ func Import() {
defer db.Close()
}
osmCache := cache.NewOSMCache(config.BaseOptions.CacheDir)
osmCache := cache.NewOSMCache(baseOpts.CacheDir)
if config.ImportOptions.Read != "" && osmCache.Exists() {
if config.ImportOptions.Overwritecache {
log.Printf("removing existing cache %s", config.BaseOptions.CacheDir)
if importOpts.Read != "" && osmCache.Exists() {
if importOpts.Overwritecache {
log.Printf("removing existing cache %s", baseOpts.CacheDir)
err := osmCache.Remove()
if err != nil {
log.Fatal("unable to remove cache:", err)
}
} else if !config.ImportOptions.Appendcache {
} else if !importOpts.Appendcache {
log.Fatal("cache already exists use -appendcache or -overwritecache")
}
}
@ -92,7 +93,7 @@ func Import() {
var elementCounts *stats.ElementCounts
if config.ImportOptions.Read != "" {
if importOpts.Read != "" {
step := log.StartStep("Reading OSM data")
err = osmCache.Open()
if err != nil {
@ -100,17 +101,17 @@ func Import() {
}
progress := stats.NewStatsReporter()
if !config.ImportOptions.Appendcache {
if !importOpts.Appendcache {
// enable optimization if we don't append to existing cache
osmCache.Coords.SetLinearImport(true)
}
readLimiter := geometryLimiter
if config.BaseOptions.LimitToCacheBuffer == 0.0 {
if baseOpts.LimitToCacheBuffer == 0.0 {
readLimiter = nil
}
err := reader.ReadPbf(config.ImportOptions.Read,
err := reader.ReadPbf(importOpts.Read,
osmCache,
progress,
tagmapping,
@ -124,13 +125,13 @@ func Import() {
elementCounts = progress.Stop()
osmCache.Close()
log.StopStep(step)
if config.ImportOptions.Diff {
diffstate, err := state.FromPbf(config.ImportOptions.Read, config.BaseOptions.DiffStateBefore, config.BaseOptions.ReplicationUrl, config.BaseOptions.ReplicationInterval)
if importOpts.Diff {
diffstate, err := state.FromPbf(importOpts.Read, baseOpts.DiffStateBefore, baseOpts.ReplicationUrl, baseOpts.ReplicationInterval)
if err != nil {
log.Print("error parsing diff state form PBF", err)
} else if diffstate != nil {
os.MkdirAll(config.BaseOptions.DiffDir, 0755)
err := state.WriteLastState(config.BaseOptions.DiffDir, diffstate)
os.MkdirAll(baseOpts.DiffDir, 0755)
err := state.WriteLastState(baseOpts.DiffDir, diffstate)
if err != nil {
log.Print("error writing last.state.txt: ", err)
}
@ -138,7 +139,7 @@ func Import() {
}
}
if config.ImportOptions.Write {
if importOpts.Write {
stepImport := log.StartStep("Importing OSM data")
stepWrite := log.StartStep("Writing OSM data")
progress := stats.NewStatsReporterWithEstimate(elementCounts)
@ -159,8 +160,8 @@ func Import() {
}
var diffCache *cache.DiffCache
if config.ImportOptions.Diff {
diffCache = cache.NewDiffCache(config.BaseOptions.CacheDir)
if importOpts.Diff {
diffCache = cache.NewDiffCache(baseOpts.CacheDir)
if err = diffCache.Remove(); err != nil {
log.Fatal(err)
}
@ -187,7 +188,7 @@ func Import() {
tagmapping.PolygonMatcher,
tagmapping.RelationMatcher,
tagmapping.RelationMemberMatcher,
config.BaseOptions.Srid)
baseOpts.Srid)
relWriter.SetLimiter(geometryLimiter)
relWriter.EnableConcurrent()
relWriter.Start()
@ -200,7 +201,7 @@ func Import() {
ways, db,
progress,
tagmapping.PolygonMatcher, tagmapping.LineStringMatcher,
config.BaseOptions.Srid)
baseOpts.Srid)
wayWriter.SetLimiter(geometryLimiter)
wayWriter.EnableConcurrent()
wayWriter.Start()
@ -211,7 +212,7 @@ func Import() {
nodeWriter := writer.NewNodeWriter(osmCache, nodes, db,
progress,
tagmapping.PointMatcher,
config.BaseOptions.Srid)
baseOpts.Srid)
nodeWriter.SetLimiter(geometryLimiter)
nodeWriter.EnableConcurrent()
nodeWriter.Start()
@ -225,7 +226,7 @@ func Import() {
progress.Stop()
if config.ImportOptions.Diff {
if importOpts.Diff {
diffCache.Close()
}
@ -249,7 +250,7 @@ func Import() {
log.StopStep(stepImport)
}
if config.ImportOptions.Optimize {
if importOpts.Optimize {
if db, ok := db.(database.Optimizer); ok {
if err := db.Optimize(); err != nil {
log.Fatal(err)
@ -259,7 +260,7 @@ func Import() {
}
}
if config.ImportOptions.DeployProduction {
if importOpts.DeployProduction {
if db, ok := db.(database.Deployer); ok {
if err := db.Deploy(); err != nil {
log.Fatal(err)
@ -269,7 +270,7 @@ func Import() {
}
}
if config.ImportOptions.RevertDeploy {
if importOpts.RevertDeploy {
if db, ok := db.(database.Deployer); ok {
if err := db.RevertDeploy(); err != nil {
log.Fatal(err)
@ -279,7 +280,7 @@ func Import() {
}
}
if config.ImportOptions.RemoveBackup {
if importOpts.RemoveBackup {
if db, ok := db.(database.Deployer); ok {
if err := db.RemoveBackup(); err != nil {
log.Fatal(err)


@ -61,8 +61,8 @@ func (s *importTestSuite) importOsm(t *testing.T) {
"-removebackup=false",
}
config.ParseImport(importArgs)
import_.Import()
opts := config.ParseImport(importArgs)
import_.Import(opts)
}
func (s *importTestSuite) deployOsm(t *testing.T) {
@ -82,8 +82,8 @@ func (s *importTestSuite) deployOsm(t *testing.T) {
"-quiet",
}
config.ParseImport(importArgs)
import_.Import()
opts := config.ParseImport(importArgs)
import_.Import(opts)
}
func (s *importTestSuite) revertDeployOsm(t *testing.T) {
@ -105,8 +105,8 @@ func (s *importTestSuite) revertDeployOsm(t *testing.T) {
"-quiet",
}
config.ParseImport(importArgs)
import_.Import()
opts := config.ParseImport(importArgs)
import_.Import(opts)
}
func (s *importTestSuite) cache(t *testing.T) *cache.OSMCache {
@ -141,8 +141,8 @@ func (s *importTestSuite) removeBackupOsm(t *testing.T) {
"-quiet",
}
config.ParseImport(importArgs)
import_.Import()
opts := config.ParseImport(importArgs)
import_.Import(opts)
}
func (s *importTestSuite) updateOsm(t *testing.T, diffFile string) {
@ -157,8 +157,8 @@ func (s *importTestSuite) updateOsm(t *testing.T, diffFile string) {
args = append(args, "-expiretiles-dir", s.config.expireTileDir)
}
args = append(args, diffFile)
config.ParseDiffImport(args)
update.Diff()
opts, files := config.ParseDiffImport(args)
update.Diff(opts, files)
}
func (s *importTestSuite) dropSchemas() {


@ -25,33 +25,33 @@ import (
var log = logging.NewLogger("diff")
func Diff() {
if config.BaseOptions.Quiet {
func Diff(baseOpts config.Base, files []string) {
if baseOpts.Quiet {
logging.SetQuiet(true)
}
var geometryLimiter *limit.Limiter
if config.BaseOptions.LimitTo != "" {
if baseOpts.LimitTo != "" {
var err error
step := log.StartStep("Reading limitto geometries")
geometryLimiter, err = limit.NewFromGeoJSON(
config.BaseOptions.LimitTo,
config.BaseOptions.LimitToCacheBuffer,
config.BaseOptions.Srid,
baseOpts.LimitTo,
baseOpts.LimitToCacheBuffer,
baseOpts.Srid,
)
if err != nil {
log.Fatal(err)
}
log.StopStep(step)
}
osmCache := cache.NewOSMCache(config.BaseOptions.CacheDir)
osmCache := cache.NewOSMCache(baseOpts.CacheDir)
err := osmCache.Open()
if err != nil {
log.Fatal("osm cache: ", err)
}
defer osmCache.Close()
diffCache := cache.NewDiffCache(config.BaseOptions.CacheDir)
diffCache := cache.NewDiffCache(baseOpts.CacheDir)
err = diffCache.Open()
if err != nil {
log.Fatal("diff cache: ", err)
@ -59,8 +59,8 @@ func Diff() {
var exp expire.Expireor
if config.BaseOptions.ExpireTilesDir != "" {
tileexpire := expire.NewTileList(config.BaseOptions.ExpireTilesZoom, config.BaseOptions.ExpireTilesDir)
if baseOpts.ExpireTilesDir != "" {
tileexpire := expire.NewTileList(baseOpts.ExpireTilesZoom, baseOpts.ExpireTilesDir)
exp = tileexpire
defer func() {
if err := tileexpire.Flush(); err != nil {
@ -69,8 +69,8 @@ func Diff() {
}()
}
for _, oscFile := range config.DiffFlags.Args() {
err := Update(oscFile, geometryLimiter, exp, osmCache, diffCache, false)
for _, oscFile := range files {
err := Update(baseOpts, oscFile, geometryLimiter, exp, osmCache, diffCache, false)
if err != nil {
osmCache.Close()
diffCache.Close()
@ -82,12 +82,20 @@ func Diff() {
diffCache.Close()
}
func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expireor, osmCache *cache.OSMCache, diffCache *cache.DiffCache, force bool) error {
func Update(
baseOpts config.Base,
oscFile string,
geometryLimiter *limit.Limiter,
expireor expire.Expireor,
osmCache *cache.OSMCache,
diffCache *cache.DiffCache,
force bool,
) error {
state, err := diffstate.FromOscGz(oscFile)
if err != nil {
return err
}
lastState, err := diffstate.ParseLastState(config.BaseOptions.DiffDir)
lastState, err := diffstate.ParseLastState(baseOpts.DiffDir)
if err != nil {
log.Warn(err)
}
@ -106,18 +114,18 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
return err
}
tagmapping, err := mapping.FromFile(config.BaseOptions.MappingFile)
tagmapping, err := mapping.FromFile(baseOpts.MappingFile)
if err != nil {
return err
}
dbConf := database.Config{
ConnectionParams: config.BaseOptions.Connection,
Srid: config.BaseOptions.Srid,
ConnectionParams: baseOpts.Connection,
Srid: baseOpts.Srid,
// we apply diff imports on the Production schema
ImportSchema: config.BaseOptions.Schemas.Production,
ProductionSchema: config.BaseOptions.Schemas.Production,
BackupSchema: config.BaseOptions.Schemas.Backup,
ImportSchema: baseOpts.Schemas.Production,
ProductionSchema: baseOpts.Schemas.Production,
BackupSchema: baseOpts.Schemas.Backup,
}
db, err := database.Open(dbConf, &tagmapping.Conf)
if err != nil {
@ -170,7 +178,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
tagmapping.PolygonMatcher,
tagmapping.RelationMatcher,
tagmapping.RelationMemberMatcher,
config.BaseOptions.Srid)
baseOpts.Srid)
relWriter.SetLimiter(geometryLimiter)
relWriter.SetExpireor(expireor)
relWriter.Start()
@ -181,7 +189,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
progress,
tagmapping.PolygonMatcher,
tagmapping.LineStringMatcher,
config.BaseOptions.Srid)
baseOpts.Srid)
wayWriter.SetLimiter(geometryLimiter)
wayWriter.SetExpireor(expireor)
wayWriter.Start()
@ -189,7 +197,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
nodeWriter := writer.NewNodeWriter(osmCache, nodes, db,
progress,
tagmapping.PointMatcher,
config.BaseOptions.Srid)
baseOpts.Srid)
nodeWriter.SetLimiter(geometryLimiter)
nodeWriter.SetExpireor(expireor)
nodeWriter.Start()
@ -413,7 +421,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
if lastState != nil {
state.Url = lastState.Url
}
err = diffstate.WriteLastState(config.BaseOptions.DiffDir, state)
err = diffstate.WriteLastState(baseOpts.DiffDir, state)
if err != nil {
log.Warn(err) // warn only
}
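
Update now takes the Base options as its first argument, so a single change file can be applied without going through Diff's file loop. A rough sketch of such a direct call (package paths guessed from the repository layout, caches opened the way Diff does, hypothetical option values, no limiter and no tile expiry):

package main

import (
	"log"

	"github.com/omniscale/imposm3/cache" // paths assumed from the repository layout
	"github.com/omniscale/imposm3/config"
	"github.com/omniscale/imposm3/update"
)

// applyOsc is a hypothetical helper that applies one .osc.gz file via the
// new explicit-options Update signature, mirroring what Diff does per file.
func applyOsc(baseOpts config.Base, oscFile string) {
	osmCache := cache.NewOSMCache(baseOpts.CacheDir)
	if err := osmCache.Open(); err != nil {
		log.Fatal("osm cache: ", err)
	}
	defer osmCache.Close()

	diffCache := cache.NewDiffCache(baseOpts.CacheDir)
	if err := diffCache.Open(); err != nil {
		log.Fatal("diff cache: ", err)
	}
	defer diffCache.Close()

	// nil limiter and nil expireor correspond to no -limitto and no
	// -expiretiles-dir; force=false keeps the sequence check.
	if err := update.Update(baseOpts, oscFile, nil, nil, osmCache, diffCache, false); err != nil {
		log.Fatal(err)
	}
}

func main() {
	// Hypothetical options; normally these come from config.ParseDiffImport.
	opts := config.Base{
		Connection:  "postgis://user:password@localhost/osm",
		CacheDir:    "/tmp/imposm3_cache",
		MappingFile: "mapping.yml",
		Srid:        3857,
	}
	applyOsc(opts, "changes.osc.gz")
}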


@ -18,19 +18,19 @@ import (
var logger = logging.NewLogger("")
func Run() {
if config.BaseOptions.Quiet {
func Run(baseOpts config.Base) {
if baseOpts.Quiet {
logging.SetQuiet(true)
}
var geometryLimiter *limit.Limiter
if config.BaseOptions.LimitTo != "" {
if baseOpts.LimitTo != "" {
var err error
step := logger.StartStep("Reading limitto geometries")
geometryLimiter, err = limit.NewFromGeoJSON(
config.BaseOptions.LimitTo,
config.BaseOptions.LimitToCacheBuffer,
config.BaseOptions.Srid,
baseOpts.LimitTo,
baseOpts.LimitToCacheBuffer,
baseOpts.Srid,
)
if err != nil {
logger.Fatal(err)
@ -38,11 +38,11 @@ func Run() {
logger.StopStep(step)
}
s, err := state.ParseLastState(config.BaseOptions.DiffDir)
s, err := state.ParseLastState(baseOpts.DiffDir)
if err != nil {
log.Fatal("unable to read last.state.txt", err)
}
replicationUrl := config.BaseOptions.ReplicationUrl
replicationUrl := baseOpts.ReplicationUrl
if replicationUrl == "" {
replicationUrl = s.Url
}
@ -51,24 +51,24 @@ func Run() {
"or replication_url in -config file")
}
logger.Print("Replication URL: " + replicationUrl)
logger.Print("Replication interval: ", config.BaseOptions.ReplicationInterval)
logger.Print("Replication interval: ", baseOpts.ReplicationInterval)
downloader := replication.NewDiffDownloader(
config.BaseOptions.DiffDir,
baseOpts.DiffDir,
replicationUrl,
s.Sequence,
config.BaseOptions.ReplicationInterval,
baseOpts.ReplicationInterval,
)
nextSeq := downloader.Sequences()
osmCache := cache.NewOSMCache(config.BaseOptions.CacheDir)
osmCache := cache.NewOSMCache(baseOpts.CacheDir)
err = osmCache.Open()
if err != nil {
logger.Fatal("osm cache: ", err)
}
defer osmCache.Close()
diffCache := cache.NewDiffCache(config.BaseOptions.CacheDir)
diffCache := cache.NewDiffCache(baseOpts.CacheDir)
err = diffCache.Open()
if err != nil {
logger.Fatal("diff cache: ", err)
@ -81,8 +81,8 @@ func Run() {
var tilelist *expire.TileList
var lastTlFlush = time.Now()
var tileExpireor expire.Expireor
if config.BaseOptions.ExpireTilesDir != "" {
tilelist = expire.NewTileList(config.BaseOptions.ExpireTilesZoom, config.BaseOptions.ExpireTilesDir)
if baseOpts.ExpireTilesDir != "" {
tilelist = expire.NewTileList(baseOpts.ExpireTilesZoom, baseOpts.ExpireTilesDir)
tileExpireor = tilelist
}
@ -113,7 +113,7 @@ func Run() {
for {
p := logger.StartStep(fmt.Sprintf("importing #%d till %s", seqId, seqTime))
err := Update(fname, geometryLimiter, tileExpireor, osmCache, diffCache, false)
err := Update(baseOpts, fname, geometryLimiter, tileExpireor, osmCache, diffCache, false)
osmCache.Coords.Flush()
diffCache.Flush()