adjust crawler log directory

Kevin Lynx 11 years ago
parent f16d25dae7
commit 6f6aac3b35

@@ -30,7 +30,8 @@ do_start() ->
     crawler_sup:start_link({StartPort, Count, DBHost, DBPort, LogLevel, DBConn, CacheMaxTime, CacheMaxCount}).
 
 start() ->
-    error_logger:logfile({open, "crash.log"}),
+    filelib:ensure_dir("log/"),
+    error_logger:logfile({open, "log/crash.log"}),
     code:add_path("deps/bson/ebin"),
     code:add_path("deps/kdht/ebin"),
     code:add_path("deps/mongodb/ebin"),
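For context: filelib:ensure_dir/1 only creates the parent directories of the name it is given, so the trailing slash in "log/" is what makes the log directory itself get created (plain "log" would merely ensure the current directory exists). A minimal sketch of the pattern this hunk applies:

    %% Create ./log if it is missing, then redirect crash reports into it.
    %% filelib:ensure_dir/1 creates parents only, hence the trailing slash.
    filelib:ensure_dir("log/"),
    error_logger:logfile({open, "log/crash.log"}).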

@@ -136,7 +136,8 @@ date_string() ->
 -define(TEXT(Fmt, Arg), lists:flatten(io_lib:format(Fmt, Arg))).
 
 do_dump(State) ->
-    {ok, FP} = file:open("dhtcrawler-stats.txt", [append]),
+    filelib:ensure_dir("log/"),
+    {ok, FP} = file:open("log/dhtcrawler-stats.txt", [append]),
     io:format(FP, "~s~n", [date_string()]),
     io:format(FP, "~s~n", [format_stats(State)]),
     file:close(FP).

@@ -24,7 +24,8 @@ srv_name() ->
 init([{StartPort, Count, DBHost, DBPort, LogLevel, DBConn, CacheTime, HashCacheMax}]) ->
     Spec = {one_for_one, 1, 600},
     Instances = create_dht_instance(StartPort, Count),
-    Logger = [{dht_logger, {vlog, start_link, ["dht_crawler.txt", LogLevel]},
+    filelib:ensure_dir("log/"),
+    Logger = [{dht_logger, {vlog, start_link, ["log/dht_crawler.txt", LogLevel]},
         permanent, 2000, worker, dynamic}],
     HashInserter = [{hash_cache_writer, {hash_cache_writer, start_link, [DBHost, DBPort, DBConn, CacheTime, HashCacheMax]},
         permanent, 2000, worker, dynamic}],
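One subtlety in this hunk: the return value of filelib:ensure_dir/1 (ok | {error, Reason}) is discarded, so init/1 carries on even if log/ could not be created and the failure only surfaces later, presumably when the vlog worker opens its file. A defensive variant (an assumption about intent, not part of the commit) would fail fast instead:

    %% Hypothetical hardening: crash init/1 immediately if the log
    %% directory cannot be created, rather than letting vlog fail later.
    ok = filelib:ensure_dir("log/"),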
