diff --git a/backend/src/c/bav/Makefile b/backend/src/c/bav/Makefile index 4fbfa4a01..431de69b2 100644 --- a/backend/src/c/bav/Makefile +++ b/backend/src/c/bav/Makefile @@ -9,7 +9,6 @@ # # #################################################################################################################################################################################################################################################################### # - CFLAGS = -I. -I../lib -pthread LDFLAGS = -L. -L../lib -pthread LIBS = -lagn diff --git a/backend/src/c/bav/bav.c b/backend/src/c/bav/bav.c index 196cb7c5a..cd6383ee2 100644 --- a/backend/src/c/bav/bav.c +++ b/backend/src/c/bav/bav.c @@ -402,7 +402,7 @@ find_locals (void) /*{{{*/ void *cfg; rc = true; - if (cfg = systemconfig_alloc (NULL)) { + if (cfg = systemconfig_alloc ()) { const char *value; char *copy; diff --git a/backend/src/c/lib/Makefile b/backend/src/c/lib/Makefile index 43a9c7f4c..42b32bf74 100644 --- a/backend/src/c/lib/Makefile +++ b/backend/src/c/lib/Makefile @@ -9,7 +9,6 @@ # # #################################################################################################################################################################################################################################################################### # - CFLAGS = -I. LDFLAGS = -L. LIBS := -lagn -lparson diff --git a/backend/src/c/lib/agn.h b/backend/src/c/lib/agn.h index 9b2b0ad0c..ac47ab648 100644 --- a/backend/src/c/lib/agn.h +++ b/backend/src/c/lib/agn.h @@ -262,6 +262,7 @@ typedef unsigned long logmask_t; /** * All informations required for logging */ +typedef struct idc idc_t; typedef struct { /*{{{*/ int logfd; /**< file desc. 
to copy output to */ int slprio; /**< syslog priority */ @@ -275,7 +276,7 @@ typedef struct { /*{{{*/ long lastday; /**< dito for the day */ int diff; /**< TZ drift */ FILE *lfp; /**< filepointer to output file */ - void *idc; /**< ID chain */ + idc_t *idc; /**< ID chain */ bool_t slactive; /**< syslog is active */ buffer_t *obuf; /**< output buffer */ buffer_t *collect; /**< to collect all messages */ @@ -478,7 +479,7 @@ extern bool_t log_collect (log_t *l, int level); extern void log_uncollect (log_t *l); extern bool_t log_idset (log_t *l, const char *what); extern void log_idclr (log_t *l); -extern bool_t log_idpush (log_t *l, const char *what, const char *separator); +extern bool_t log_idpush (log_t *l, const char *what); extern void log_idpop (log_t *l); extern void log_suspend_pop (log_t *l); extern void log_suspend_push (log_t *l, unsigned long mask, bool_t set); @@ -562,7 +563,7 @@ extern bool_t purl_set_anchor (purl_t *p, const byte_t *anchor); extern const byte_t *purl_build (purl_t *p, const char *extra_encode, int *rlen, bool_t (*callback) (void *, buffer_t *, const byte_t *, int), void *priv); extern void *systemconfig_free (void *lc); -extern void *systemconfig_alloc (const char *fname); +extern void *systemconfig_alloc (void); extern const char *systemconfig_find (void *lc, const char *key); extern bool_t systemconfig_get (void *lc, int idx, const char **key, const char **value); diff --git a/backend/src/c/lib/buffer.c b/backend/src/c/lib/buffer.c index 4fdd2d8e6..248f1a79b 100644 --- a/backend/src/c/lib/buffer.c +++ b/backend/src/c/lib/buffer.c @@ -730,19 +730,28 @@ pool_flush (pool_t *p) /*{{{*/ buffer_t * pool_request (pool_t *p, int nsize) /*{{{*/ { - buffer_t *b; + if (p -> root) { + buffer_t *b, *prev; - if (b = p -> root) { - p -> root = p -> root -> link; + for (b = p -> root, prev = NULL; b && b -> link; ) { + if ((b -> size < nsize) || ((b -> size >= nsize) && (b -> link -> size < nsize))) + break; + prev = b; + b = b -> link; + } + if 
(prev) + prev -> link = b -> link; + else + p -> root = b -> link; b -> link = NULL; buffer_clear (b); - buffer_size (b, nsize); - } else { - b = buffer_alloc (nsize); + if (b -> size < nsize) { + if (! buffer_size (b, nsize)) + buffer_clear (b); + } + return b; } - if (b && (! b -> valid)) - buffer_clear (b); - return b; + return buffer_alloc (nsize); }/*}}}*/ buffer_t * pool_release (pool_t *p, buffer_t *b) /*{{{*/ diff --git a/backend/src/c/lib/log.c b/backend/src/c/lib/log.c index 04a644b87..1ee1230f2 100644 --- a/backend/src/c/lib/log.c +++ b/backend/src/c/lib/log.c @@ -28,51 +28,48 @@ /** Stack for IDs. * This stack is used to keep track of logging IDs. */ -typedef struct idc { /*{{{*/ - char *str; /**< concated ID string */ - struct idc *next; /**< next element in stack */ +struct idc { /*{{{*/ + char *str; /**< concated ID string */ + idc_t *next; /**< next element in stack */ /*}}}*/ -} idc_t; +}; +/** Frees an ID. + * @param i the ID to free + * @return NULL + */ +static idc_t * +idc_free (idc_t *i) /*{{{*/ +{ + if (i) { + if (i -> str) + free (i -> str); + free (i); + } + return NULL; +}/*}}}*/ /** Alloced ID. * @param prefix the already stacked IDs * @param str the new ID - * @param separator * @return the new head of the stack on success, NULL otherwise */ static idc_t * -idc_alloc (idc_t *prefix, const char *str, const char *separator) /*{{{*/ +idc_alloc (idc_t *prefix, const char *str) /*{{{*/ { idc_t *i; if (i = (idc_t *) malloc (sizeof (idc_t))) { if (prefix) { - if (i -> str = malloc (strlen (prefix -> str) + strlen (str) + (separator ? strlen (separator) : 0) + 1)) - sprintf (i -> str, "%s%s%s", prefix -> str, (separator ? separator : ""), str); + if (i -> str = malloc (strlen (prefix -> str) + strlen (str) + 3)) + sprintf (i -> str, "%s->%s", prefix -> str, str); } else i -> str = strdup (str); if (i -> str) i -> next = NULL; - else { - free (i); - i = NULL; - } + else + i = idc_free (i); } return i; }/*}}}*/ -/** Frees an ID. 
- * @param i the ID to free - * @return NULL - */ -static idc_t * -idc_free (idc_t *i) /*{{{*/ -{ - if (i) { - if (i -> str) - free (i -> str); - free (i); - } - return NULL; -}/*}}}*/ /** Frees stack. * @param i the ID to start from * @return NULL @@ -267,7 +264,7 @@ log_free (log_t *l) /*{{{*/ if (l -> lfp) fclose (l -> lfp); if (l -> idc) - idc_free_all ((idc_t *) l -> idc); + idc_free_all (l -> idc); if (l -> obuf) buffer_free (l -> obuf); if (l -> collect) @@ -353,7 +350,7 @@ log_path_set (log_t *l, const char *logpath) /*{{{*/ return (logpath && (! l -> logpath)) ? false : true; }/*}}}*/ /** Sets default logging path. - * Sets the default logging path creating from enviroment variable + * Sets the default logging path creating from environment variable * @param l the logger * @return true on success, false otherwise */ @@ -460,7 +457,7 @@ bool_t log_idset (log_t *l, const char *what) /*{{{*/ { log_idclr (l); - l -> idc = idc_alloc (NULL, what, NULL); + l -> idc = idc_alloc (NULL, what); return l -> idc ? true : false; }/*}}}*/ /** Clears logging IDs. @@ -471,24 +468,23 @@ void log_idclr (log_t *l) /*{{{*/ { if (l -> idc) - l -> idc = idc_free_all ((idc_t *) l -> idc); + l -> idc = idc_free_all (l -> idc); }/*}}}*/ /** Push new logging ID. * The new logging id what is pushed on top of the ID stack. * If there is already one on the stack, the new ID is created using - * the stack value, concaternated by separator and new ID + * the stack value. * @param l the logger * @param what the new ID - * @param separator the separator for concaternation * @return true on success, false otherwise */ bool_t -log_idpush (log_t *l, const char *what, const char *separator) /*{{{*/ +log_idpush (log_t *l, const char *what) /*{{{*/ { idc_t *tmp; - if (tmp = idc_alloc ((separator ? (idc_t *) l -> idc : NULL), what, separator)) { - tmp -> next = (idc_t *) l -> idc; + if (tmp = idc_alloc (l -> idc, what)) { + tmp -> next = l -> idc; l -> idc = tmp; } return tmp ? 
true : false; @@ -502,7 +498,7 @@ log_idpop (log_t *l) /*{{{*/ { idc_t *tmp; - if (tmp = (idc_t *) l -> idc) { + if (tmp = l -> idc) { l -> idc = tmp -> next; idc_free (tmp); } @@ -684,7 +680,7 @@ log_mout (log_t *l, int level, logmask_t mask, const char *what, const char *fmt bool_t log_vidout (log_t *l, int level, logmask_t mask, const char *fmt, va_list par) /*{{{*/ { - return log_vmout (l, level, mask, (l -> idc ? ((idc_t *) l -> idc) -> str : NULL), fmt, par); + return log_vmout (l, level, mask, (l -> idc ? l -> idc -> str : NULL), fmt, par); }/*}}}*/ /** Write to logfile. * Same as log_vidout except that parameter are passed directly @@ -747,7 +743,7 @@ log_slout (log_t *l, int level, logmask_t mask, int priority, const char *what, bool_t log_vout (log_t *l, int level, const char *fmt, va_list par) /*{{{*/ { - return log_vmout (l, level, 0, (l -> idc ? ((idc_t *) l -> idc) -> str : NULL), fmt, par); + return log_vmout (l, level, 0, l -> idc ? l -> idc -> str : NULL, fmt, par); }/*}}}*/ /** Write to logfile. * Same as log_vout except that parameter are passed directly diff --git a/backend/src/c/lib/systemconfig.c b/backend/src/c/lib/systemconfig.c index 0689b8f2a..a9f2ff1d8 100644 --- a/backend/src/c/lib/systemconfig.c +++ b/backend/src/c/lib/systemconfig.c @@ -445,7 +445,7 @@ systemconfig_free (void *lc) /*{{{*/ return NULL; }/*}}}*/ void * -systemconfig_alloc (const char *fname) /*{{{*/ +systemconfig_alloc (void) /*{{{*/ { config_t *c; @@ -473,18 +473,18 @@ systemconfig_alloc (const char *fname) /*{{{*/ ok = false; } } else { - if (! fname) { - fname = getenv (PATH_CONFIG_ENV); - if (! fname) { - fname = PATH_CONFIG; + const char *filename; + + filename = getenv (PATH_CONFIG_ENV); + if (! filename) { + filename = PATH_CONFIG; # ifdef PATH_LEGACY - if (access (fname, R_OK) == -1) { - fname = PATH_LEGACY; - } -# endif + if ((access (filename, R_OK) == -1) && (access (PATH_LEGACY, R_OK) != -1)) { + filename = PATH_LEGACY; } +# endif } - if (! 
(c -> filename = strdup (fname))) { + if (! (c -> filename = strdup (filename))) { ok = false; } else { ok = config_check (c); diff --git a/backend/src/c/lib/timeout.c b/backend/src/c/lib/timeout.c index 679b39448..c33f18777 100644 --- a/backend/src/c/lib/timeout.c +++ b/backend/src/c/lib/timeout.c @@ -47,8 +47,9 @@ timeout_init (void) /*{{{*/ timeout_release (); timeout -> seconds = 0; timeout -> start = 0; + return true; } - return timeout ? true : false; + return false; }/*}}}*/ void timeout_release (void) /*{{{*/ diff --git a/backend/src/c/lib/xml.c b/backend/src/c/lib/xml.c index d1846bff7..c4bd6f64e 100644 --- a/backend/src/c/lib/xml.c +++ b/backend/src/c/lib/xml.c @@ -203,29 +203,6 @@ xsubstr (const xchar_t *s, int start, int end) /*{{{*/ rc = NULL; return rc; }/*}}}*/ -bool_t -xmlbuf_equal (xmlbuf_t *b1, xmlbuf_t *b2) /*{{{*/ -{ - if ((! b1) && (! b2)) - return true; - if (b1 && b2 && (b1 -> length == b2 -> length) && - ((! b1 -> length) || (! memcmp (b1 -> buffer, b2 -> buffer, b1 -> length)))) - return true; - return false; -}/*}}}*/ -char * -xmlbuf_to_string (xmlbuf_t *b) /*{{{*/ -{ - return buffer_copystring ((buffer_t *) b); -}/*}}}*/ -long -xmlbuf_to_long (xmlbuf_t *b) /*{{{*/ -{ - const char *s = b ? buffer_string (b) : NULL; - - return s ? 
strtol (s, NULL, 0) : -1; -}/*}}}*/ - static inline unsigned long mkcp (const xchar_t *s, int *len) /*{{{*/ { diff --git a/backend/src/c/lib/xml.h b/backend/src/c/lib/xml.h index 0f527c0f1..bd050e733 100644 --- a/backend/src/c/lib/xml.h +++ b/backend/src/c/lib/xml.h @@ -18,7 +18,6 @@ * XML data */ typedef byte_t xchar_t; -typedef buffer_t xmlbuf_t; typedef struct { /*{{{*/ int csize; cache_t *lower, @@ -40,67 +39,6 @@ extern int xstrnlen (const xchar_t *s, int slen); extern int xstrcmp (const xchar_t *s1, const char *s2); extern int xstrncmp (const xchar_t *s1, const char *s2, size_t n); extern xchar_t *xsubstr (const xchar_t *s, int start, int end); -extern bool_t xmlbuf_equal (xmlbuf_t *b1, xmlbuf_t *b2); -extern char *xmlbuf_to_string (xmlbuf_t *b); -extern long xmlbuf_to_long (xmlbuf_t *b); - -static inline xmlbuf_t * -xmlbuf_alloc (int nsize) /*{{{*/ -{ - return (xmlbuf_t *) buffer_alloc (nsize); -}/*}}}*/ -static inline xmlbuf_t * -xmlbuf_free (xmlbuf_t *b) /*{{{*/ -{ - return (xmlbuf_t *) buffer_free ((buffer_t *) b); -}/*}}}*/ -static inline void -xmlbuf_clear (xmlbuf_t *b) /*{{{*/ -{ - buffer_clear ((buffer_t *) b); -}/*}}}*/ -static inline int -xmlbuf_length (xmlbuf_t *b) /*{{{*/ -{ - return buffer_length ((buffer_t *) b); -}/*}}}*/ -static inline const xchar_t * -xmlbuf_content (xmlbuf_t *b) /*{{{*/ -{ - return (const xchar_t *) buffer_content ((buffer_t *) b); -}/*}}}*/ -static inline bool_t -xmlbuf_add (xmlbuf_t *b, const xchar_t *data, int dlen) /*{{{*/ -{ - return buffer_append ((buffer_t *) b, (const byte_t *) data, dlen); -}/*}}}*/ -static inline bool_t -xmlbuf_set (xmlbuf_t *b, const xchar_t *data, int dlen) /*{{{*/ -{ - xmlbuf_clear (b); - return xmlbuf_add (b, data, dlen); -}/*}}}*/ -static inline const char * -xmlbuf_string (xmlbuf_t *b) /*{{{*/ -{ - return buffer_string ((buffer_t *) b); -}/*}}}*/ -static inline char * -xmlbuf_copystring (xmlbuf_t *b) /*{{{*/ -{ - return buffer_copystring ((buffer_t *) b); -}/*}}}*/ -static inline xmlbuf_t * 
-pool_xrequest (pool_t *p, int nsize) /*{{{*/ -{ - return (xmlbuf_t *) pool_request (p, nsize); -}/*}}}*/ -static inline xmlbuf_t * -pool_xrelease (pool_t *p, xmlbuf_t *b) /*{{{*/ -{ - return (xmlbuf_t *) pool_release (p, (buffer_t *) b); -}/*}}}*/ - extern const xchar_t *xtolower (const xchar_t *s, int *slen, int *olen); extern const xchar_t *xtoupper (const xchar_t *s, int *slen, int *olen); extern const xchar_t *xtotitle (const xchar_t *s, int *slen, int *olen); diff --git a/backend/src/c/tools/Makefile b/backend/src/c/tools/Makefile index eaec57265..4c02453a1 100644 --- a/backend/src/c/tools/Makefile +++ b/backend/src/c/tools/Makefile @@ -9,7 +9,6 @@ # # #################################################################################################################################################################################################################################################################### # - CFLAGS = -I../lib LDFLAGS = -L../lib LIBS = -lagn diff --git a/backend/src/c/tools/cquery.c b/backend/src/c/tools/cquery.c index fbbcf2804..666e7db15 100644 --- a/backend/src/c/tools/cquery.c +++ b/backend/src/c/tools/cquery.c @@ -13,25 +13,20 @@ static void usage (const char *pgm) /*{{{*/ { - fprintf (stderr, "Usage: %s [-c ] [-d|+]\n", pgm); + fprintf (stderr, "Usage: %s [-d|+]\n", pgm); }/*}}}*/ int main (int argc, char **argv) /*{{{*/ { int rc; int n; - const char *config; bool_t dump; void *cfg; const char *key, *value; - config = NULL; dump = false; - while ((n = getopt (argc, argv, "c:d?h")) != -1) + while ((n = getopt (argc, argv, "d?h")) != -1) switch (n) { - case 'c': - config = optarg; - break; case 'd': dump = true; break; @@ -40,8 +35,8 @@ main (int argc, char **argv) /*{{{*/ default: return usage (argv[0]), (n != '?') && (n != 'h'); } - if (! (cfg = systemconfig_alloc (config))) - return fprintf (stderr, "Failed to setup config %s.\n", config ? config : "from default"), 1; + if (! 
(cfg = systemconfig_alloc ())) + return fprintf (stderr, "Failed to setup config.\n"), 1; rc = 0; if (dump) { if (optind == argc) { diff --git a/backend/src/c/tools/qctrl.c b/backend/src/c/tools/qctrl.c index b13cdb674..c2348e4b6 100644 --- a/backend/src/c/tools/qctrl.c +++ b/backend/src/c/tools/qctrl.c @@ -160,7 +160,7 @@ main (int argc, char **argv) /*{{{*/ bool_t fst; log_out (lg, LV_DEBUG, "Initializing command %s", cmdtab[n].cmd); - log_idpush (lg, cmdtab[n].cmd, NULL); + log_idpush (lg, cmdtab[n].cmd); data = (*cmdtab[n].finit) (lg, force, args, alen); log_idpop (lg); if (data) { @@ -178,7 +178,7 @@ main (int argc, char **argv) /*{{{*/ csig_block (csig); log_mark (lg, LV_INFO, 180); log_out (lg, LV_DEBUG, "Executing command %s", cmdtab[n].cmd); - log_idpush (lg, cmdtab[n].cmd, NULL); + log_idpush (lg, cmdtab[n].cmd); fst = (*cmdtab[n].fexec) (data); log_idpop (lg); if (! fst) { @@ -210,7 +210,7 @@ main (int argc, char **argv) /*{{{*/ csig_free (csig); } log_out (lg, LV_DEBUG, "Deinitialize command %s", cmdtab[n].cmd); - log_idpush (lg, cmdtab[n].cmd, NULL); + log_idpush (lg, cmdtab[n].cmd); fst = (*cmdtab[n].fdeinit) (data); log_idpop (lg); if (! fst) diff --git a/backend/src/c/xmlback/Makefile b/backend/src/c/xmlback/Makefile index 039d4cc10..9286ffcf5 100644 --- a/backend/src/c/xmlback/Makefile +++ b/backend/src/c/xmlback/Makefile @@ -9,8 +9,7 @@ # # #################################################################################################################################################################################################################################################################### # - -CFLAGS = '-DEMM_VERSION="OpenEMM 22.10.000.122"' -I. -I../lib -Iinclude +CFLAGS = '-DEMM_VERSION="OpenEMM 23.04.000.199"' -I. -I../lib -Iinclude LDFLAGS = -L. 
-L../lib -Llib LIBS = -lagn -lslang -lopendkim -lbsd -lresolv -llua -lmpack -lssl -lcrypto -lxml2 -lz -lm SRCS = entity.c xmlback.c parse.c create.c replace.c modify.c protect.c convert.c append.c \ @@ -42,8 +41,8 @@ lib include: [ -d $@ ] || mkdir $@ lib/libslang.a: slang-1.4.9.tar.gz slang-1.4.9.patch tar xaf slang-1.4.9.tar.gz && ( cd slang-1.4.9; patch -p1 < ../slang-1.4.9.patch; ./configure; make; mv src/objs/libslang.a ../lib; mv src/slang.h ../include; cd ..; rm -rf slang-1.4.9 ) -lib/liblua.a: lua-5.3.5.tar.gz - tar xaf lua-5.3.5.tar.gz && ( cd lua-5.3.5; make linux; mv src/liblua.a ../lib; mv src/lauxlib.h src/lua.h src/luaconf.h src/lualib.h ../include; cd ..; rm -rf lua-5.3.5 ) +lib/liblua.a: lua-5.4.4.tar.gz + tar xaf lua-5.4.4.tar.gz && ( cd lua-5.4.4; make linux; mv src/liblua.a ../lib; mv src/lauxlib.h src/lua.h src/luaconf.h src/lualib.h ../include; cd ..; rm -rf lua-5.4.4 ) lib/libssl.a lib/libcrypto.a: openssl-1.0.1j.tar.gz tar xzf openssl-1.0.1j.tar.gz && ( cd openssl-1.0.1j; ./config no-shared no-dso --prefix=`dirname \`pwd\`` --openssldir=/home/openemm//etc/openssl; make; mv libcrypto.a libssl.a ../lib; rm -rf ../include/openssl; mkdir ../include/openssl; cp include/openssl/*.h ../include/openssl; cd ..; rm -rf rm -rf openssl-1.0.1j ) lib/libz.a: zlib-1.2.3.tar.gz diff --git a/backend/src/c/xmlback/alua.c b/backend/src/c/xmlback/alua.c index c47e699b5..f3206f453 100644 --- a/backend/src/c/xmlback/alua.c +++ b/backend/src/c/xmlback/alua.c @@ -12,6 +12,7 @@ # include # include # include +# define _GNU_SOURCE /* required to get extended fnmatch flag values */ # include # include # include @@ -797,50 +798,69 @@ alua_type (lua_State *lua) /*{{{*/ lua_pushstring (lua, str); return 1; }/*}}}*/ +static int +alua_dofile (lua_State *lua) /*{{{*/ +{ + return 0; +}/*}}}*/ +static int +alua_loadfile (lua_State *lua) /*{{{*/ +{ + lua_pushnil (lua); + lua_pushstring (lua, "not implemented"); + return 2; +}/*}}}*/ static struct { /*{{{*/ const char *libname; 
lua_CFunction libfunc; + bool_t sandbox; /*}}}*/ } alua_libtab[] = { /*{{{*/ - { "", luaopen_base }, - { LUA_STRLIBNAME, luaopen_string }, + { "", luaopen_base, true }, + { LUA_STRLIBNAME, luaopen_string, true }, # ifdef LUA_UTF8LIBNAME - { LUA_UTF8LIBNAME, luaopen_utf8 }, + { LUA_UTF8LIBNAME, luaopen_utf8, true }, # endif /* LUA_UTF8LIBNAME */ - { LUA_TABLIBNAME, luaopen_table }, - { LUA_MATHLIBNAME, luaopen_math }, - { LUA_COLIBNAME, luaopen_coroutine } + { LUA_TABLIBNAME, luaopen_table, true }, + { LUA_MATHLIBNAME, luaopen_math, true }, + { LUA_COLIBNAME, luaopen_coroutine, true } /*}}}*/ }; static struct { /*{{{*/ const char *modname; const char *funcname; lua_CFunction func; + bool_t sandbox; /*}}}*/ } alua_functab[] = { /*{{{*/ - { NULL, "type", alua_type } + { NULL, "type", alua_type, true }, + { NULL, "dofile", alua_dofile, true }, + { NULL, "loadfile", alua_loadfile, true } /*}}}*/ }; # define FTSIZE (sizeof (alua_functab) / sizeof (alua_functab[0])) void -alua_setup_libraries (lua_State *lua) /*{{{*/ +alua_setup_libraries (lua_State *lua, bool_t sandbox) /*{{{*/ { int n; const char *modname; for (n = 0; n < sizeof (alua_libtab) / sizeof (alua_libtab[0]); ++n) { + if ((! sandbox) || alua_libtab[n].sandbox) { # if LUA_VERSION_NUM >= 502 - luaL_requiref (lua, alua_libtab[n].libname, alua_libtab[n].libfunc, 1); - lua_pop (lua, 1); + luaL_requiref (lua, alua_libtab[n].libname, alua_libtab[n].libfunc, 1); + lua_pop (lua, 1); # else - lua_pushcfunction (lua, alua_libtab[n].libfunc); - lua_pushstring (lua, alua_libtab[n].libname); - lua_call (lua, 1, 0); -# endif + lua_pushcfunction (lua, alua_libtab[n].libfunc); + lua_pushstring (lua, alua_libtab[n].libname); + lua_call (lua, 1, 0); +# endif + } } alua_date_setup (lua); - alua_env_setup (lua); + if (! 
sandbox) + alua_env_setup (lua); alua_null_setup (lua); modname = NULL; for (n = 0; n <= FTSIZE; ++n) { @@ -860,7 +880,7 @@ alua_setup_libraries (lua_State *lua) /*{{{*/ } } } - if (n < FTSIZE) { + if ((n < FTSIZE) && ((! sandbox) || alua_functab[n].sandbox)) { lua_pushcfunction (lua, alua_functab[n].func); if (modname) lua_setfield (lua, -2, alua_functab[n].funcname); @@ -905,13 +925,13 @@ alua_panic (lua_State *lua) /*{{{*/ return 0; }/*}}}*/ lua_State * -alua_alloc (void) /*{{{*/ +alua_alloc (bool_t sandbox) /*{{{*/ { lua_State *lua; if (lua = lua_newstate (alua_allocator, NULL)) { lua_atpanic (lua, alua_panic); - alua_setup_libraries (lua); + alua_setup_libraries (lua, sandbox); } return lua; }/*}}}*/ @@ -951,12 +971,44 @@ alua_load (lua_State *lua, const char *name, const void *code, size_t clen) /*{{ rd.clen = clen; rd.sent = 0; # if LUA_VERSION_NUM >= 502 - if ((lua_load (lua, alua_reader, & rd, name, NULL) == 0) && (lua_pcall (lua, 0, 0, 0) == 0)) + if ((lua_load (lua, alua_reader, & rd, name, NULL) == 0) && (lua_pcall (lua, 0, 0, 0) == LUA_OK)) # else - if ((lua_load (lua, alua_reader, & rd, name) == 0) && (lua_pcall (lua, 0, 0, 0) == 0)) + if ((lua_load (lua, alua_reader, & rd, name) == 0) && (lua_pcall (lua, 0, 0, 0) == LUA_OK)) # endif rc = true; else rc = false; return rc; }/*}}}*/ +typedef struct { /*{{{*/ + lua_State *lua; + int nargs; + int nresults; + int msgh; + int rc; + /*}}}*/ +} pcall_t; +static void +pcall_wrapper (void *pp) /*{{{*/ +{ + pcall_t *pc = (pcall_t *) pp; + + pc -> rc = lua_pcall (pc -> lua, pc -> nargs, pc -> nresults, pc -> msgh); +}/*}}}*/ +int +alua_pcall (lua_State *lua, int nargs, int nresults, int msgh, int timeout) /*{{{*/ +{ + if (timeout > 0) { + pcall_t pc; + + pc.lua = lua; + pc.nargs = nargs; + pc.nresults = nresults; + pc.msgh = msgh; + pc.rc = -1; + if (timeout_exec (timeout, pcall_wrapper, & pc)) + return pc.rc; + return -1; + } else + return lua_pcall (lua, nargs, nresults, msgh); +}/*}}}*/ diff --git 
a/backend/src/c/xmlback/alua.h b/backend/src/c/xmlback/alua.h index 17e3f79ef..03b7ab25f 100644 --- a/backend/src/c/xmlback/alua.h +++ b/backend/src/c/xmlback/alua.h @@ -40,9 +40,10 @@ extern int alua_isdate (lua_State *lua, int idx); extern int alua_isnull (lua_State *lua, int idx); extern void alua_pushnull (lua_State *lua); -extern void alua_setup_libraries (lua_State *lua); +extern void alua_setup_libraries (lua_State *lua, bool_t sandbox); extern void alua_setup_function (lua_State *lua, const char *modname, const char *funcname, lua_CFunction func, void *closure); -extern lua_State *alua_alloc (void); +extern lua_State *alua_alloc (bool_t sandbox); extern void alua_free (lua_State *lua); extern bool_t alua_load (lua_State *lua, const char *name, const void *code, size_t clen); +extern int alua_pcall (lua_State *lua, int nargs, int nresults, int msgh, int timeout); # endif /* __ALUA_H */ diff --git a/backend/src/c/xmlback/block.c b/backend/src/c/xmlback/block.c index 878513370..9042f7106 100644 --- a/backend/src/c/xmlback/block.c +++ b/backend/src/c/xmlback/block.c @@ -9,8 +9,48 @@ * * ********************************************************************************************************************************************************************************************************************************************************************/ # include +# include +# include # include "xmlback.h" +static const xmlChar * +tagsearch (const xmlChar *haystack, int haystack_size, const xmlChar *needle, int needle_size, int *match_size) /*{{{*/ +{ + const xmlChar *match = NULL; + int match_index = 0; + int clen; + + while (haystack_size > 0) { + clen = xmlCharLength (*haystack); + if ((clen <= haystack_size) && (clen <= needle_size - match_index)) { + if ((clen == 1) && isspace (*haystack) && match && (xmlCharLength (needle[match_index]) == 1) && isspace (needle[match_index])) { + ++haystack; + --haystack_size; + ++match_index; + while ((haystack_size > 0) && 
(xmlCharLength (*haystack) == 1) && isspace (*haystack)) + ++haystack, --haystack_size; + while ((match_index < needle_size) && (xmlCharLength (needle[match_index]) == 1) && isspace (needle[match_index])) + ++match_index; + continue; + } + if (match && memcmp (haystack, needle + match_index, clen)) + match_index = 0; + if (! memcmp (haystack, needle + match_index, clen)) { + if (! match_index) + match = haystack; + match_index += clen; + if (match_index == needle_size) { + *match_size = haystack - match + clen; + return match; + } + } + } else + match_index = 0; + haystack += clen; + haystack_size -= clen; + } + return NULL; +}/*}}}*/ block_t * block_alloc (void) /*{{{*/ { @@ -42,7 +82,6 @@ block_alloc (void) /*{{{*/ b -> bcontent = NULL; b -> bout = NULL; DO_ZERO (b, tagpos); - b -> sorted = NULL; b -> revalidation.source = NULL; b -> revalidation.target = NULL; b -> inuse = false; @@ -78,8 +117,6 @@ block_free (block_t *b) /*{{{*/ if (b -> bout) buffer_free (b -> bout); DO_FREE (b, tagpos); - if (b -> sorted) - free (b -> sorted); if (b -> revalidation.source) buffer_free (b -> revalidation.source); if (b -> revalidation.target) @@ -88,6 +125,14 @@ block_free (block_t *b) /*{{{*/ } return NULL; }/*}}}*/ +void +block_swap_inout (block_t *b) /*{{{*/ +{ + xmlBufferPtr temp = b -> in; + + b -> in = b -> out; + b -> out = temp; +}/*}}}*/ bool_t block_setup_charset (block_t *b) /*{{{*/ { @@ -117,10 +162,27 @@ block_setup_charset (block_t *b) /*{{{*/ void block_setup_tagpositions (block_t *b, blockmail_t *blockmail) /*{{{*/ { - int n; + int n; + int position; + const xmlChar *content = b -> content ? xmlBufferContent (b -> content) : NULL; + int length = b -> content ? 
xmlBufferLength (b -> content) : 0; - for (n = 0; n < b -> tagpos_count; ++n) - tagpos_setup_tag (b -> tagpos[n], blockmail); + for (n = 0, position = 0; n < b -> tagpos_count; ++n) { + tagpos_t *tp = b -> tagpos[n]; + + if (content && tp -> name) { + int match_size; + const xmlChar *hit = tagsearch (content + position, length - position, xmlBufferContent (tp -> name), xmlBufferLength (tp -> name), & match_size); + + if (hit) { + tp -> start = hit - content; + tp -> end = position = tp -> start + match_size; + } else { + tp -> start = tp -> end = position = length; + } + } + tagpos_setup_tag (tp, blockmail); + } }/*}}}*/ void block_find_method (block_t *b) /*{{{*/ diff --git a/backend/src/c/xmlback/blockmail.c b/backend/src/c/xmlback/blockmail.c index 7abce49f0..70701ac5b 100644 --- a/backend/src/c/xmlback/blockmail.c +++ b/backend/src/c/xmlback/blockmail.c @@ -103,6 +103,7 @@ blockmail_alloc (const char *fname, bool_t syncfile, log_t *lg) /*{{{*/ b -> active = false; b -> reason = REASON_UNSPEC; b -> reason_detail = 0; + b -> reason_custom = NULL; b -> head = NULL; b -> body = NULL; b -> rblocks = NULL; @@ -139,7 +140,9 @@ blockmail_alloc (const char *fname, bool_t syncfile, log_t *lg) /*{{{*/ b -> auto_url = NULL; b -> auto_url_is_dynamic = false; b -> auto_url_prefix = NULL; + b -> gui = false; b -> anon = false; + b -> anon_preserve_links = false; b -> selector = NULL; b -> convert_to_entities = false; b -> onepixel_url = NULL; @@ -177,6 +180,8 @@ blockmail_alloc (const char *fname, bool_t syncfile, log_t *lg) /*{{{*/ b -> mtbuf[0] = NULL; b -> mtbuf[1] = NULL; + b -> use_new_url_modification = false; + DO_ZERO (b, url); DO_ZERO (b, link_resolve); @@ -243,6 +248,8 @@ blockmail_free (blockmail_t *b) /*{{{*/ tracker_free (b -> tracker); if (b -> counter) counter_free_all (b -> counter); + if (b -> reason_custom) + free (b -> reason_custom); if (b -> head) buffer_free (b -> head); if (b -> body) @@ -423,7 +430,7 @@ blockmail_unsync (blockmail_t *b) /*{{{*/ } 
}/*}}}*/ bool_t -blockmail_insync (blockmail_t *b, int cid, const char *mediatype, int subtype, int chunks, int bcccount) /*{{{*/ +blockmail_insync (blockmail_t *b, receiver_t *rec, int bcccount) /*{{{*/ { bool_t rc; @@ -435,6 +442,7 @@ blockmail_insync (blockmail_t *b, int cid, const char *mediatype, int subtype, i char *size, *mtyp, *temp; int styp; int ncid; + int chunks; long bytes; while (inp = fgets (buf, sizeof (buf) - 1, b -> syfp)) { @@ -466,9 +474,9 @@ blockmail_insync (blockmail_t *b, int cid, const char *mediatype, int subtype, i } } } - if (mtyp && (cid == ncid) && (! strcmp (mtyp, mediatype)) && (styp == subtype)) { + if (mtyp && (rec -> customer_id == ncid) && (! strcmp (mtyp, rec -> mid)) && (styp == rec -> mailtype)) { if ((bytes > 0) && b -> mailtrack) - mailtrack_add (b -> mailtrack, cid); + mailtrack_add (b -> mailtrack, rec); rc = true; break; } @@ -479,7 +487,7 @@ blockmail_insync (blockmail_t *b, int cid, const char *mediatype, int subtype, i return rc; }/*}}}*/ bool_t -blockmail_tosync (blockmail_t *b, int cid, const char *mediatype, int subtype, int chunks, long size, int bcccount) /*{{{*/ +blockmail_tosync (blockmail_t *b, receiver_t *rec, int bcccount) /*{{{*/ { bool_t rc; @@ -494,17 +502,17 @@ blockmail_tosync (blockmail_t *b, int cid, const char *mediatype, int subtype, i } else pos = -1; if (rc) { - if ((fprintf (b -> syfp, "%d;%ld;%s;%d;%d\n", cid, size, mediatype, subtype, chunks) == -1) || + if ((fprintf (b -> syfp, "%d;%ld;%s;%d;%d\n", rec -> customer_id, rec -> size, rec -> mid, rec -> mailtype, rec -> chunks) == -1) || (fflush (b -> syfp) == -1)) rc = false; if (rc && (pos != -1) && (fseek (b -> syfp, pos, SEEK_SET) == -1)) rc = false; } } - if (rc && cid) { - rc = blockmail_count (b, mediatype, subtype, chunks, size, bcccount); + if (rc && rec -> customer_id) { + rc = blockmail_count (b, rec -> mid, rec -> mailtype, rec -> chunks, rec -> size, bcccount); if (b -> active && b -> mailtrack) - mailtrack_add (b -> mailtrack, cid); + 
mailtrack_add (b -> mailtrack, rec); } return rc; }/*}}}*/ @@ -760,9 +768,10 @@ blockmail_setup_auto_url_prefix (blockmail_t *b, const char *nprefix) /*{{{*/ b -> auto_url_prefix = nprefix && *nprefix ? strdup (nprefix) : NULL; }/*}}}*/ void -blockmail_setup_anon (blockmail_t *b, bool_t anon) /*{{{*/ +blockmail_setup_anon (blockmail_t *b, bool_t anon, bool_t anon_preserve_links) /*{{{*/ { b -> anon = anon; + b -> anon_preserve_links = anon_preserve_links; }/*}}}*/ void blockmail_setup_selector (blockmail_t *b, const char *selector) /*{{{*/ diff --git a/backend/src/c/xmlback/create.c b/backend/src/c/xmlback/create.c index 5f15bfb88..c0626467a 100644 --- a/backend/src/c/xmlback/create.c +++ b/backend/src/c/xmlback/create.c @@ -111,17 +111,18 @@ fixer (fix_t *fix, int attcount, blockmail_t *blockmail, receiver_t *rec, bool_t static bool_t create_mail (blockmail_t *blockmail, receiver_t *rec) /*{{{*/ { - int n, m; - bool_t st; - int attcount; - links_t *links; - postfix_t *postfixes; - buffer_t *dest; + int n, m; + bool_t st; + int attcount; + links_t *links; + postfix_t *postfixes; + buffer_t *dest; mailtypedefinition_t *mtyp; - blockspec_t *bspec; - block_t *block; - rblock_t *rbprev, *rbhead; - bool_t changed; + blockspec_t *bspec; + block_t *block; + block_t *header; + rblock_t *rbprev, *rbhead; + bool_t changed; mtyp = (rec -> mailtype >= 0) && (rec -> mailtype < blockmail -> mailtypedefinition_count) ? blockmail -> mailtypedefinition[rec -> mailtype] : NULL; if (! mtyp) { @@ -140,6 +141,7 @@ create_mail (blockmail_t *blockmail, receiver_t *rec) /*{{{*/ * 1. Stage: check for usful blocks, count attachments and * create the content part */ links = mtyp -> offline ? links_alloc () : NULL; + header = NULL; for (n = 0; st && (n < mtyp -> blockspec_count); ++n) { bspec = mtyp -> blockspec[n]; block = bspec -> block; @@ -178,7 +180,7 @@ create_mail (blockmail_t *blockmail, receiver_t *rec) /*{{{*/ attcount++; if (! 
block -> binary) { if (st) { - log_idpush (blockmail -> lg, "replace_tags", "->"); + log_idpush (blockmail -> lg, "replace_tags"); st = replace_tags (blockmail, rec, block, 0, true, (block -> tid != TID_EMail_Head ? NULL : replace_head), @@ -187,29 +189,19 @@ create_mail (blockmail_t *blockmail, receiver_t *rec) /*{{{*/ log_idpop (blockmail -> lg); if (! st) log_out (blockmail -> lg, LV_ERROR, "Unable to replace tags in block %d for %d", block -> nr, rec -> customer_id); + else if (block -> tid == TID_EMail_Head) { + if (block -> nr == 0) { + header = block; + } + } } - if (st) { - log_idpush (blockmail -> lg, "modify_output", "->"); + if (st && (block -> tid != TID_EMail_Head)) { + log_idpush (blockmail -> lg, "modify_output"); st = modify_output (blockmail, rec, block, bspec, links); log_idpop (blockmail -> lg); if (! st) log_out (blockmail -> lg, LV_ERROR, "Unable to modify output in block %d for %d", block -> nr, rec -> customer_id); } - if (st) { - if ((! blockmail -> raw) && (! block -> precoded)) { - log_idpush (blockmail -> lg, "convert_charset", "->"); - st = convert_charset (blockmail, block); - log_idpop (blockmail -> lg); - if (! st) - log_out (blockmail -> lg, LV_ERROR, "Unable to convert chararcter set in block %d for %d", block -> nr, rec -> customer_id); - } else { - xmlBufferPtr temp; - - temp = block -> out; - block -> out = block -> in; - block -> in = temp; - } - } } } if (changed && rec -> rvdata -> cur) @@ -217,9 +209,39 @@ create_mail (blockmail_t *blockmail, receiver_t *rec) /*{{{*/ } if (links) links_free (links); + + /* + * 2.1. Stage finalize header */ + if (st && header) { + log_idpush (blockmail -> lg, "modify_header"); + st = modify_header (blockmail, header); + log_idpop (blockmail -> lg); + if (! st) + log_out (blockmail -> lg, LV_ERROR, "Unable to modify header for %d", rec -> customer_id); + } + + /* + * 2.2. 
Stage: finalize blocks */ + for (n = 0; st && (n < mtyp -> blockspec_count); ++n) { + bspec = mtyp -> blockspec[n]; + block = bspec -> block; + if (block -> inuse && (! block -> binary)) { + if (st) { + if ((! blockmail -> raw) && (! block -> precoded)) { + log_idpush (blockmail -> lg, "convert_charset"); + st = convert_charset (blockmail, block); + log_idpop (blockmail -> lg); + if (! st) + log_out (blockmail -> lg, LV_ERROR, "Unable to convert chararcter set in block %d for %d", block -> nr, rec -> customer_id); + } else { + block_swap_inout (block); + } + } + } + } /* - * 2. Stage: determinate the required postfixes */ + * 3. Stage: determinate the required postfixes */ postfixes = NULL; for (n = 0; st && (n < mtyp -> blockspec_count); ++n) { bspec = mtyp -> blockspec[n]; @@ -253,7 +275,7 @@ create_mail (blockmail_t *blockmail, receiver_t *rec) /*{{{*/ } /* - * 3. Stage: create the output */ + * 4. Stage: create the output */ rbprev = NULL; rbhead = NULL; for (n = 0; st && (n <= mtyp -> blockspec_count); ++n) { @@ -371,6 +393,10 @@ create_output (blockmail_t *blockmail, receiver_t *rec) /*{{{*/ blockmail -> active = true; blockmail -> reason = REASON_UNSPEC; blockmail -> reason_detail = 0; + if (blockmail -> reason_custom) { + free (blockmail -> reason_custom); + blockmail -> reason_custom = NULL; + } blockmail -> head -> length = 0; blockmail -> body -> length = 0; if (blockmail -> raw && blockmail -> rblocks) diff --git a/backend/src/c/xmlback/eval.c b/backend/src/c/xmlback/eval.c index f7a76e6e8..e51330d86 100644 --- a/backend/src/c/xmlback/eval.c +++ b/backend/src/c/xmlback/eval.c @@ -1318,18 +1318,19 @@ do_init (blockmail_t *blockmail) /*{{{*/ s = NULL; if (! 
parse_error) { parse_error = buffer_alloc (512); + SLang_Error_Hook = record_error; + if ((SLang_init_slang () == -1) || + (SLang_init_slmath () == -1) || + (SLang_init_slassoc () == -1) || + (SLang_init_array () == -1) || + (SLdate_setup (& sd) == -1) || + (SLadd_intrin_fun_table (functab, (char *) "__XMLBACK__") == -1)) + return NULL; } else { parse_error -> length = 0; } ctx_set (blockmail); - SLang_Error_Hook = record_error; - if ((SLang_init_slang () != -1) && - (SLang_init_slmath () != -1) && - (SLang_init_slassoc () != -1) && - (SLang_init_array () != -1) && - (SLdate_setup (& sd) != -1) && - (SLadd_intrin_fun_table (functab, (char *) "__XMLBACK__") != -1) && - (s = (slang_t *) malloc (sizeof (slang_t)))) { + if (s = (slang_t *) malloc (sizeof (slang_t))) { char *rcfile; s -> blockmail = blockmail; diff --git a/backend/src/c/xmlback/generate.c b/backend/src/c/xmlback/generate.c index c120228a1..1ba1f0f1c 100644 --- a/backend/src/c/xmlback/generate.c +++ b/backend/src/c/xmlback/generate.c @@ -1539,21 +1539,30 @@ generate_owrite (void *data, blockmail_t *blockmail, receiver_t *rec) /*{{{*/ } if (! 
blockmail -> active) { char dsn[32]; - char scratch[128]; + char *custom; const char *reason; snprintf (dsn, sizeof (dsn) - 1, "1.%d.%d", blockmail -> reason, blockmail -> reason_detail); + custom = NULL; switch (blockmail -> reason) { case REASON_UNSPEC: reason = "skip=unspec reason"; break; case REASON_NO_MEDIA: reason = "skip=no media"; break; case REASON_EMPTY_DOCUMENT: reason = "skip=no document"; break; case REASON_UNMATCHED_MEDIA: reason = "skip=unmatched media"; break; + case REASON_CUSTOM: default: - snprintf (scratch, sizeof (scratch) - 1, "skip=reason %d", blockmail -> reason); - reason = scratch; + if ((blockmail -> reason == REASON_CUSTOM) && blockmail -> reason_custom) { + if (custom = malloc (strlen (blockmail -> reason_custom) + 6)) + sprintf (custom, "skip=%s", blockmail -> reason_custom); + } else if (custom = malloc (32)) { + sprintf (custom, "skip=reason %d", blockmail -> reason); + } + reason = custom; break; } - st = write_bounce_log (g, blockmail, rec, dsn, reason); + st = write_bounce_log (g, blockmail, rec, dsn, reason ? reason : "skip=not specified"); + if (custom) + free (custom); } else if (! rec -> media) { st = write_bounce_log (g, blockmail, rec, "1.0.0", "skip=missing media"); } else if (rec -> media -> type == Mediatype_EMail) { diff --git a/backend/src/c/xmlback/link_resolve.c b/backend/src/c/xmlback/link_resolve.c index cfb3a473b..1e66db8dd 100644 --- a/backend/src/c/xmlback/link_resolve.c +++ b/backend/src/c/xmlback/link_resolve.c @@ -374,7 +374,7 @@ to_target (hashtag_t *h, buffer_t *input1, xmlBufferPtr input2, buffer_t *target if (block && block -> translate) { xmlBufferPtr out = xmlBufferCreate (); - xmlBufferPtr buf; + xmlBufferPtr buf = NULL; if (input1) { buf = xmlBufferCreateSize (input1 -> length); @@ -388,7 +388,7 @@ to_target (hashtag_t *h, buffer_t *input1, xmlBufferPtr input2, buffer_t *target done = true; } xmlBufferFree (out); - if (input1) + if (buf) xmlBufferFree (buf); } if (! 
done) diff --git a/backend/src/c/xmlback/lua-5.3.5.tar.gz b/backend/src/c/xmlback/lua-5.3.5.tar.gz deleted file mode 100644 index 90c002729..000000000 Binary files a/backend/src/c/xmlback/lua-5.3.5.tar.gz and /dev/null differ diff --git a/backend/src/c/xmlback/lua-5.4.4.tar.gz b/backend/src/c/xmlback/lua-5.4.4.tar.gz new file mode 100644 index 000000000..f33e45812 Binary files /dev/null and b/backend/src/c/xmlback/lua-5.4.4.tar.gz differ diff --git a/backend/src/c/xmlback/luatc.c b/backend/src/c/xmlback/luatc.c index 26b476de4..8e2f8b554 100644 --- a/backend/src/c/xmlback/luatc.c +++ b/backend/src/c/xmlback/luatc.c @@ -290,7 +290,7 @@ do_unittest (iflua_t *il, bool_t quiet, int benchmark) /*{{{*/ return rc; }/*}}}*/ static bool_t -validate (const char *fname, codeblock_t *cb, bool_t quiet, bool_t postproc, bool_t unittest, int benchmark) /*{{{*/ +validate (const char *fname, codeblock_t *cb, bool_t quiet, bool_t unittest, int benchmark) /*{{{*/ { bool_t rc; log_t *lg; @@ -324,18 +324,19 @@ validate (const char *fname, codeblock_t *cb, bool_t quiet, bool_t postproc, boo blockmail -> company_token = strdup ("abc123"); blockmail -> mailinglist_id = 3; blockmail -> mailinglist_name = xmlBufferCreate (); - xmlBufferCat (blockmail -> mailinglist_name, (const xmlChar *) "Mailinglist"); + xmlBufferCCat (blockmail -> mailinglist_name, "Mailinglist"); blockmail -> mailing_id = 4; blockmail -> mailing_name = xmlBufferCreate (); - xmlBufferCat (blockmail -> mailing_name, (const xmlChar *) "Mailing"); + xmlBufferCCat (blockmail -> mailing_name, "Mailing"); blockmail -> maildrop_status_id = 5; blockmail -> status_field = 'W'; blockmail -> senddate = tf_parse_date ("2010-03-20 12:34:56"); - blockmail -> total_subscribers = 6; - blockmail -> blocknr = 7; blockmail -> secret_key = xmlBufferCreate (); xmlBufferCCat (blockmail -> secret_key, "This is secret"); - blockmail -> secret_timestamp = 8; + blockmail -> secret_timestamp = 6; + blockmail -> total_subscribers = 7; + blockmail 
-> domain = strdup ("exsample.com"); + blockmail -> blocknr = 8; string_map_addsi (blockmail -> smap, "licence_id", blockmail -> licence_id); string_map_addsi (blockmail -> smap, "owner_id", blockmail -> owner_id); string_map_addsi (blockmail -> smap, "company_id", blockmail -> company_id); @@ -356,7 +357,7 @@ validate (const char *fname, codeblock_t *cb, bool_t quiet, bool_t postproc, boo if (cur -> var[0] == '_') string_map_addss (blockmail -> smap, cur -> var + 1, cur -> val); } - for (run = cb; run; run = run ->next) { + for (run = cb; run; run = run -> next) { char id[1024]; if (run -> condition) @@ -365,41 +366,37 @@ validate (const char *fname, codeblock_t *cb, bool_t quiet, bool_t postproc, boo strcpy (id, fname); rc = false; if (r = receiver_alloc (blockmail, 0)) { + iflua_t *il; + r -> customer_id = 100; r -> user_type = 'W'; - if (postproc) { - fprintf (stderr, "Postprocessing not available for %s\n", id); - } else { - iflua_t *il; - - if (il = iflua_alloc (blockmail)) { - il -> rec = r; - alua_setup_function (il -> lua, NULL, "fileread", l_fileread, il); - if (quiet) - alua_setup_function (il -> lua, NULL, "print", l_silent, NULL); - if (alua_load (il -> lua, id, run -> code, run -> length)) { - if (unittest) - rc = do_unittest (il, quiet, benchmark); - else { - rc = true; - lua_getglobal (il -> lua, F_MAIN); - if (lua_isfunction (il -> lua, -1)) { - if (lua_pcall (il -> lua, 0, 0, 0) != 0) { - fprintf (stderr, "Failed to execute function \"" F_MAIN "\"\n"); - fprintf (stderr, "*** %s\n", lua_tostring (il -> lua, -1)); - rc = false; - } - } else - lua_pop (il -> lua, 1); - } - } else { - fprintf (stderr, "Failed to execute code for %s\n", id); - fprintf (stderr, "*** %s\n", lua_tostring (il -> lua, -1)); + if (il = iflua_alloc (blockmail, false)) { + il -> rec = r; + alua_setup_function (il -> lua, NULL, "fileread", l_fileread, il); + if (quiet) + alua_setup_function (il -> lua, NULL, "print", l_silent, NULL); + if (alua_load (il -> lua, id, run -> 
code, run -> length)) { + if (unittest) + rc = do_unittest (il, quiet, benchmark); + else { + rc = true; + lua_getglobal (il -> lua, F_MAIN); + if (lua_isfunction (il -> lua, -1)) { + if (lua_pcall (il -> lua, 0, 0, 0) != 0) { + fprintf (stderr, "Failed to execute function \"" F_MAIN "\"\n"); + fprintf (stderr, "*** %s\n", lua_tostring (il -> lua, -1)); + rc = false; + } + } else + lua_pop (il -> lua, 1); } - iflua_free (il); - } else - fprintf (stderr, "Failed to setup interpreter interface for %s\n", id); - } + } else { + fprintf (stderr, "Failed to execute code for %s\n", id); + fprintf (stderr, "*** %s\n", lua_tostring (il -> lua, -1)); + } + iflua_free (il); + } else + fprintf (stderr, "Failed to setup interpreter interface for %s\n", id); receiver_free (r); } else fprintf (stderr, "Failed to setup receiver structure for %s\n", id); @@ -417,23 +414,18 @@ main (int argc, char **argv) /*{{{*/ { int n; bool_t quiet; - bool_t postproc; bool_t unittest; int benchmark; int rc; quiet = false; - postproc = false; unittest = false; benchmark = 0; - while ((n = getopt (argc, argv, "hqpub:")) != -1) + while ((n = getopt (argc, argv, "hqub:")) != -1) switch (n) { case 'q': quiet = true; break; - case 'p': - postproc = true; - break; case 'u': unittest = true; break; @@ -449,10 +441,6 @@ main (int argc, char **argv) /*{{{*/ fprintf (stderr, "Usage: %s [-q] [-p | -u [-b ]] \n", argv[0]); return n != 'h'; } - if (postproc && unittest) { - fprintf (stderr, "Warning: unittest in postprocess mode not supported, switched off.\n"); - unittest = false; - } rc = 0; for (n = optind; n < argc; ++n) { char *buf; @@ -462,7 +450,7 @@ main (int argc, char **argv) /*{{{*/ codeblock_t *cb; if (cb = split_code (buf)) { - if (! validate (argv[n], cb, quiet, postproc, unittest, benchmark)) + if (! 
validate (argv[n], cb, quiet, unittest, benchmark)) rc = 1; codeblock_free_all (cb); } else { diff --git a/backend/src/c/xmlback/mailtrack.c b/backend/src/c/xmlback/mailtrack.c index 43aa43c37..601e2c440 100644 --- a/backend/src/c/xmlback/mailtrack.c +++ b/backend/src/c/xmlback/mailtrack.c @@ -35,8 +35,8 @@ mailtrack_free (mailtrack_t *m) /*{{{*/ return NULL; }/*}}}*/ void -mailtrack_add (mailtrack_t *m, int customer_id) /*{{{*/ +mailtrack_add (mailtrack_t *m, receiver_t *rec) /*{{{*/ { - buffer_format (m -> content, "%s%d", (m -> count ? "," : ""), customer_id); + buffer_format (m -> content, "%s%d/%d", (m -> count ? "," : ""), rec -> customer_id, rec -> media ? rec -> media -> type : Mediatype_Unspec); m -> count++; }/*}}}*/ diff --git a/backend/src/c/xmlback/misc.c b/backend/src/c/xmlback/misc.c index aa092e8e7..45827ea12 100644 --- a/backend/src/c/xmlback/misc.c +++ b/backend/src/c/xmlback/misc.c @@ -128,4 +128,38 @@ xml2long (xmlBufferPtr p) /*{{{*/ scratch[len] = '\0'; return strtol (scratch, NULL, 0); }/*}}}*/ +I void +entity_escape (xmlBufferPtr target, const xmlChar *source, int source_length) /*{{{*/ +{ + int clen; + + while (source_length > 0) { + clen = xmlCharLength (*source); + if (clen > 1) { + xmlBufferAdd (target, source, clen); + } else + switch (*source) { + default: + xmlBufferAdd (target, source, clen); + break; + case '&': + xmlBufferCCat (target, "&"); + break; + case '<': + xmlBufferCCat (target, "<"); + break; + case '>': + xmlBufferCCat (target, ">"); + break; + case '\'': + xmlBufferCCat (target, "'"); + break; + case '"': + xmlBufferCCat (target, """); + break; + } + source += clen; + source_length -= clen; + } +}/*}}}*/ # endif /* __MISC_C */ diff --git a/backend/src/c/xmlback/modify.c b/backend/src/c/xmlback/modify.c index 7ac1b0013..ff398155f 100644 --- a/backend/src/c/xmlback/modify.c +++ b/backend/src/c/xmlback/modify.c @@ -102,6 +102,8 @@ mkonepixellogurl (blockmail_t *blockmail, receiver_t *rec) /*{{{*/ buffer_set (blockmail -> 
link_maker, xmlBufferContent (blockmail -> onepixel_url), xmlBufferLength (blockmail -> onepixel_url)); buffer_appends (blockmail -> link_maker, "uid="); buffer_appends (blockmail -> link_maker, uid); + if (blockmail -> gui) + buffer_appends (blockmail -> link_maker, "&nocount=1"); } free (uid); return buffer_length (blockmail -> link_maker) ? buffer_string (blockmail -> link_maker) : NULL; @@ -195,7 +197,7 @@ typedef struct { /*{{{*/ /*}}}*/ } rplc_t; static bool_t -replace_anon_hashtags (void *rp, xmlbuf_t *output, const xchar_t *token, int tlen) /*{{{*/ +replace_anon_hashtags (void *rp, buffer_t *output, const xchar_t *token, int tlen) /*{{{*/ { rplc_t *replacer = (rplc_t *) rp; int pos; @@ -231,8 +233,8 @@ replace_anon_hashtags (void *rp, xmlbuf_t *output, const xchar_t *token, int tle free (param); return true; }/*}}}*/ -bool_t -modify_urls (blockmail_t *blockmail, receiver_t *rec, block_t *block, protect_t *protect, bool_t ishtml, record_t *record) /*{{{*/ +static bool_t +modify_urls_original (blockmail_t *blockmail, receiver_t *rec, block_t *block, protect_t *protect, bool_t ishtml, record_t *record) /*{{{*/ { int n; int len; @@ -320,7 +322,12 @@ modify_urls (blockmail_t *blockmail, receiver_t *rec, block_t *block, protect_t state = ST_PLAIN_START_FOUND; break; /* HTML */ - case 100: CCHK ('a'); break; + case 100: + if ((tolower (ch) == 'a') || (tolower (ch) == 'v')) + ++state; + else + state = ST_INITIAL; + break; # define HCHK(ccc) do { if ((ccc) == tolower (ch)) ++state; else if ('>' == ch) state = ST_INITIAL; else state = 101; } while (0) case 101: HCHK ('h'); break; case 102: HCHK ('r'); break; @@ -402,22 +409,253 @@ modify_urls (blockmail_t *blockmail, receiver_t *rec, block_t *block, protect_t } } if (blockmail -> anon) { - if ((m == blockmail -> url_count) || blockmail -> url[m] -> admin_link) { - xmlBufferAdd (block -> out, (const xmlChar *) "#", 1); + if (! 
blockmail -> anon_preserve_links) { + if ((m == blockmail -> url_count) || blockmail -> url[m] -> admin_link) { + xmlBufferAdd (block -> out, (const xmlChar *) "#", 1); + lstore = end; + changed = true; + } else if (url_is_personal (cont + start, ulen)) { + if (! scratch) + scratch = purl_alloc (NULL); + if (scratch && purl_parsen (scratch, cont + start, ulen)) { + const xchar_t *rplc; + int rlen; + rplc_t replacer = { blockmail, rec }; + + if ((rplc = purl_build (scratch, NULL, & rlen, replace_anon_hashtags, & replacer)) && rlen) + xmlBufferAdd (block -> out, (const xmlChar *) rplc, rlen); + lstore = end; + changed = true; + } + } + } + } else if (m < blockmail -> url_count) { + mkautourl (blockmail, rec, block, blockmail -> url[m], record); + lstore = end; + changed = true; + } else if (match && match -> resolved) { + const xmlChar *url = NULL; + int ulength = -1; + buffer_t *resolve; + + if (resolve = link_resolve_get (match -> resolved, blockmail, block, match, rec, record)) { + url = resolve -> buffer; + ulength = resolve -> length; + } + if (blockmail -> tracker) { + if (! url) { + url = cont + start; + ulength = ulen; + } + tracker_fill (blockmail -> tracker, blockmail, & url, & ulength); + } + if (url) { + if (ulength > 0) + xmlBufferAdd (block -> out, url, ulength); lstore = end; changed = true; - } else if (url_is_personal (cont + start, ulen)) { - if (! 
scratch) - scratch = purl_alloc (NULL); - if (scratch && purl_parsen (scratch, cont + start, ulen)) { - const xchar_t *rplc; - int rlen; - rplc_t replacer = { blockmail, rec }; - - if ((rplc = purl_build (scratch, NULL, & rlen, replace_anon_hashtags, & replacer)) && rlen) - xmlBufferAdd (block -> out, (const xmlChar *) rplc, rlen); + } + } + state = ST_INITIAL; + } + } + if (changed) { + if (lstore < len) + xmlBufferAdd (block -> out, cont + lstore, len - lstore); + SWAP (block); + } + if (scratch) + purl_free (scratch); + return true; +}/*}}}*/ +static bool_t +modify_urls_updated (blockmail_t *blockmail, receiver_t *rec, block_t *block, protect_t *protect, bool_t ishtml, record_t *record) /*{{{*/ +{ + int n; + int len; + const xmlChar *cont; + int lstore; + int state; + char ch, quote; + int start, end; + int mask; + int clen; + bool_t changed; + purl_t *scratch; + + scratch = NULL; + xmlBufferEmpty (block -> out); + len = xmlBufferLength (block -> in); + cont = xmlBufferContent (block -> in); + lstore = 0; + state = ST_INITIAL; + quote = '\0'; + if (ishtml) { + mask = 2; + } else { + mask = 1; + } + start = -1; + end = -1; + changed = false; + for (n = 0; n <= len; ) { + if (n < len) { + clen = xmlCharLength (cont[n]); + if (protect) { + if (n >= protect -> start) { + if (n < protect -> end) + n += clen; + else + protect = protect -> next; + continue; + } + } + if ((clen > 1) || isascii ((char) cont[n])) { + ch = clen == 1 ? 
(char) cont[n] : '\0'; + switch (state) { + case ST_INITIAL: + if (ishtml) { + if (ch == '<') { + state = 100; + } + } else if (strchr ("hm", tolower (ch))) { + if (tolower (ch) == 'h') { + state = 1; + } else { + state = 31; + } + start = n; + } + break; +# define CHK(ccc) do { if ((ccc) == ch) ++state; else state = ST_INITIAL; } while (0) +# define CCHK(ccc) do { if ((ccc) == tolower (ch)) ++state; else state = ST_INITIAL; } while (0) + /* plain: http:// and https:// */ + case 1: CCHK ('t'); break; + case 2: CCHK ('t'); break; + case 3: CCHK ('p'); break; + case 4: + ++state; + if (tolower (ch) == 's') + break; + /* Fall through . . . */ + case 5: CHK (':'); break; + case 6: CHK ('/'); break; + case 7: + if (ch == '/') + state = ST_PLAIN_START_FOUND; + else + state = ST_INITIAL; + break; + /* plain: mailto: */ + case 31: CCHK ('a'); break; + case 32: CCHK ('i'); break; + case 33: CCHK ('l'); break; + case 34: CCHK ('t'); break; + case 35: CCHK ('o'); break; + case 36: CHK (':'); break; + case 37: + state = ST_PLAIN_START_FOUND; + break; + /* HTML */ +# define HCHK(ccc) do { if ((ccc) == tolower (ch)) ++state; else if ('>' == ch) state = ST_INITIAL; else state = 100; } while (0) + case 100: HCHK ('h'); break; + case 101: HCHK ('r'); break; + case 102: HCHK ('e'); break; + case 103: HCHK ('f'); break; +# undef HCHK + case 104: CHK ('='); break; + case 105: + if ((ch == '"') || (ch == '\'')) { + quote = ch; + state = ST_QUOTED_START_FOUND; + start = n + 1; + } else { + state = ST_PLAIN_START_FOUND; + start = n; + } + break; + case ST_PLAIN_START_FOUND: + if (isspace (ch) || (ch == '>')) { + end = n; + state = ST_END_FOUND; + } + break; + case ST_QUOTED_START_FOUND: + if (isspace (ch) || (ch == quote)) { + end = n; + state = ST_END_FOUND; + } + break; + default: + log_out (blockmail -> lg, LV_ERROR, "modify_urls: invalid state %d at position %d", state, n); + state = ST_INITIAL; + break; + } +# undef CHK +# undef CCHK + } else { + if (state == ST_PLAIN_START_FOUND) { 
+ end = n; + state = ST_END_FOUND; + } else + state = ST_INITIAL; + } + n += clen; + } else { + if (state == ST_PLAIN_START_FOUND) { + end = n; + state = ST_END_FOUND; + } + ++n; + } + if (state == ST_END_FOUND) { + int ulen, m; + url_t *match; + int first_orig = -1; + + ulen = end - start; + for (m = 0, match = NULL; m < blockmail -> url_count; ++m) { + if (url_match (blockmail -> url[m], cont + start, ulen)) { + match = blockmail -> url[m]; + if (blockmail -> url[m] -> usage & mask) + break; + } + if ((first_orig == -1) && blockmail -> url[m] -> orig) { + first_orig = m; + } + } + if (lstore < start) { + xmlBufferAdd (block -> out, cont + lstore, start - lstore); + lstore = start; + } + if ((match == NULL) && (first_orig != -1)) { + for (m = first_orig; m < blockmail -> url_count; ++m) { + if (url_match_original (blockmail -> url[m], cont + start, ulen)) { + match = blockmail -> url[m]; + if (blockmail -> url[m] -> usage & mask) + break; + } + } + } + if (blockmail -> anon) { + if (! blockmail -> anon_preserve_links) { + if ((m == blockmail -> url_count) || blockmail -> url[m] -> admin_link) { + xmlBufferAdd (block -> out, (const xmlChar *) "#", 1); lstore = end; changed = true; + } else if (url_is_personal (cont + start, ulen)) { + if (! 
scratch) + scratch = purl_alloc (NULL); + if (scratch && purl_parsen (scratch, cont + start, ulen)) { + const xchar_t *rplc; + int rlen; + rplc_t replacer = { blockmail, rec }; + + if ((rplc = purl_build (scratch, NULL, & rlen, replace_anon_hashtags, & replacer)) && rlen) + xmlBufferAdd (block -> out, (const xmlChar *) rplc, rlen); + lstore = end; + changed = true; + } } } } else if (m < blockmail -> url_count) { @@ -459,7 +697,11 @@ modify_urls (blockmail_t *blockmail, receiver_t *rec, block_t *block, protect_t purl_free (scratch); return true; }/*}}}*/ - +bool_t +modify_urls (blockmail_t *blockmail, receiver_t *rec, block_t *block, protect_t *protect, bool_t ishtml, record_t *record) /*{{{*/ +{ + return (blockmail -> use_new_url_modification ? modify_urls_updated : modify_urls_original) (blockmail, rec, block, protect, ishtml, record); +}/*}}}*/ static inline const byte_t * lskip (const byte_t *line, int *linelen) /*{{{*/ { @@ -1108,19 +1350,30 @@ modify_linelength (blockmail_t *blockmail, block_t *block, blockspec_t *bspec) / # undef DOIT_SKIP }/*}}}*/ bool_t +modify_header (blockmail_t *blockmail, block_t *header) /*{{{*/ +{ + bool_t rc = (header -> tid == TID_EMail_Head) ? true : false; + + if (rc && blockmail -> revalidate_mfrom) { + rc = revalidate_mfrom (blockmail, header); + } + return rc; +}/*}}}*/ +bool_t modify_output (blockmail_t *blockmail, receiver_t *rec, block_t *block, blockspec_t *bspec, links_t *links) /*{{{*/ { bool_t rc; rc = true; - if (rc && (block -> tid == TID_EMail_Head) && blockmail -> revalidate_mfrom) { - rc = revalidate_mfrom (blockmail, block); - } - if (rc && (block -> tid == TID_EMail_HTML) && links) { + if (rc && + (block -> tid == TID_EMail_HTML) && + links) { rc = collect_links (blockmail, block, links); } - if (rc && (! blockmail -> anon) && + if (rc && + (! 
blockmail -> anon) && (block -> tid == TID_EMail_HTML) && + bspec && (bspec -> opl != OPL_None)) { rc = add_onepixellog_image (blockmail, rec, block, bspec -> opl); } @@ -1129,10 +1382,18 @@ modify_output (blockmail_t *blockmail, receiver_t *rec, block_t *block, blockspe blockmail -> convert_to_entities) { rc = convert_entities (blockmail, block); } - if (rc && blockmail -> vip && (block -> tid == TID_EMail_HTML) && islower (rec -> user_type) && (tolower (blockmail -> status_field) == rec -> user_type)) { + if (rc && + blockmail -> vip && + (block -> tid == TID_EMail_HTML) && + islower (rec -> user_type) && + (tolower (blockmail -> status_field) == rec -> user_type)) { rc = add_vip_block (blockmail, block); } - if (rc && (bspec -> linelength > 0) && bspec -> linesep) { + if (rc && + (block -> tid == TID_EMail_Text) && + bspec && + (bspec -> linelength > 0) && + bspec -> linesep) { rc = modify_linelength (blockmail, block, bspec); } return rc; diff --git a/backend/src/c/xmlback/parse.c b/backend/src/c/xmlback/parse.c index 7f7e2b1b7..a1b3e0bdf 100644 --- a/backend/src/c/xmlback/parse.c +++ b/backend/src/c/xmlback/parse.c @@ -280,7 +280,7 @@ parse_info (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base, var_t **vbas ; else prev = NULL; - log_idpush (blockmail -> lg, name, "->"); + log_idpush (blockmail -> lg, name); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "info")) { @@ -323,7 +323,7 @@ parse_description (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{ char *ptr; st = true; - log_idpush (blockmail -> lg, "description", "->"); + log_idpush (blockmail -> lg, "description"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! 
xmlstrcmp (node -> name, "licence")) { @@ -391,7 +391,7 @@ parse_general (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "general", "->"); + log_idpush (blockmail -> lg, "general"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "domain")) @@ -459,7 +459,7 @@ parse_mailcreation (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{ long val; st = true; - log_idpush (blockmail -> lg, "mailcreation", "->"); + log_idpush (blockmail -> lg, "mailcreation"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "blocknr")) { @@ -489,7 +489,7 @@ parse_trackers (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "trackers", "->"); + log_idpush (blockmail -> lg, "trackers"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "tracker")) { @@ -594,7 +594,7 @@ parse_mediatypes (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{* xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "mediatypes", "->"); + log_idpush (blockmail -> lg, "mediatypes"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "media")) { @@ -624,7 +624,7 @@ parse_tagposition (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base, tagpo xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "tagposition", "->"); + log_idpush (blockmail -> lg, "tagposition"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! 
xmlstrcmp (node -> name, "block")) { @@ -655,8 +655,6 @@ parse_block (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr node, block_t *bl if (extract_numeric_property (blockmail, & bid, node, "id") && extract_numeric_property (blockmail, & val, node, "nr")) { xmlNodePtr child; - int start, end; - const xmlChar *content; st = true; block -> bid = (int) bid; @@ -685,9 +683,6 @@ parse_block (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr node, block_t *bl if (! extract_numeric_property (blockmail, & block -> target_id, node, "target_id")) { block -> target_id = 0; } - start = 0; - end = 0; - content = NULL; for (child = node -> children; st && child; child = child -> next) if (child -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (child -> name, "content")) { @@ -695,73 +690,36 @@ parse_block (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr node, block_t *bl if (st = block_setup_charset (block)) { if (block -> binary && (! (st = block_code_binary (block)))) log_out (blockmail -> lg, LV_ERROR, "Unable to decode binary part in block %d in %s", block -> nr, blockmail -> fname); - else { - start = 0; - end = xmlBufferLength (block -> content); - content = xmlBufferContent (block -> content); - } } else log_out (blockmail -> lg, LV_ERROR, "Unable to setup charset in block %d in %s", block -> nr, blockmail -> fname); } else log_out (blockmail -> lg, LV_ERROR, "Unable to extract content of block %d in %s", block -> nr, blockmail -> fname); } else if (! 
xmlstrcmp (child -> name, "tagposition")) { - if (content) { - xmlChar *name; - tagpos_t *tpos; - int len; - const xmlChar *ptr; - int n; + xmlChar *name; + tagpos_t *tpos; - name = xmlGetProp (child, char2xml ("name")); - if (name) { - DO_EXPAND (tpos, block, tagpos); - if (tpos) { - xmlBufferCat (tpos -> name, name); - if (extract_numeric_property (blockmail, & val, child, "hash")) - tpos -> hash = val; - else - tpos -> hash = 0; - if (extract_numeric_property (blockmail, & val, child, "type")) - tpos -> type = val; - tagpos_find_name (tpos); - len = xmlBufferLength (tpos -> name); - ptr = xmlBufferContent (tpos -> name); - n = 0; - st = false; - while (start < end) { - if (content[start] != ptr[n]) - n = 0; - if (content[start] == ptr[n]) { - ++n; - if (n == len) { - st = true; - break; - } - } - ++start; - } - if (st) { - ++start; - tpos -> start = start - len; - tpos -> end = start; - if (child -> children) - st = parse_tagposition (blockmail, doc, child -> children, tpos); - } else - log_out (blockmail -> lg, LV_ERROR, "tagposition %s not found in %s", ptr, blockmail -> fname); - if (! 
st) - DO_SHRINK (block, tagpos); - } else - st = false; - } else { - log_out (blockmail -> lg, LV_ERROR, "Missing properties in element %s in %s", child -> name, blockmail -> fname); + name = xmlGetProp (child, char2xml ("name")); + if (name) { + DO_EXPAND (tpos, block, tagpos); + if (tpos) { + xmlBufferCat (tpos -> name, name); + if (extract_numeric_property (blockmail, & val, child, "hash")) + tpos -> hash = val; + else + tpos -> hash = 0; + if (extract_numeric_property (blockmail, & val, child, "type")) + tpos -> type = val; + tagpos_find_name (tpos); + if (child -> children) + st = parse_tagposition (blockmail, doc, child -> children, tpos); + } else st = false; - } - if (name) - xmlFree (name); } else { - log_out (blockmail -> lg, LV_ERROR, "Missing content for tagposition"); + log_out (blockmail -> lg, LV_ERROR, "Missing properties in element %s in %s", child -> name, blockmail -> fname); st = false; } + if (name) + xmlFree (name); } else unknown (blockmail, child); if (! st) @@ -772,11 +730,6 @@ parse_block (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr node, block_t *bl log_out (blockmail -> lg, LV_ERROR, "Unable to setup block for BlockID %d in %s", block -> bid, blockmail -> fname); st = false; } - if (st && (block -> tagpos_count > 0)) - if (! (block -> sorted = (tagpos_t **) malloc (sizeof (tagpos_t *) * block -> tagpos_count))) { - log_out (blockmail -> lg, LV_ERROR, "Failed to alloc memory for sorted tag positions"); - st = false; - } } else { log_out (blockmail -> lg, LV_ERROR, "Missing number in block in %s", blockmail -> fname); st = false; @@ -790,7 +743,7 @@ parse_blocks (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "blocks", "->"); + log_idpush (blockmail -> lg, "blocks"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! 
xmlstrcmp (node -> name, "block")) { @@ -819,7 +772,7 @@ parse_fixdata (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base, fix_t *f) xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "fixdata", "->"); + log_idpush (blockmail -> lg, "fixdata"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "fixdata")) { @@ -861,7 +814,7 @@ parse_blockspec (blockmail_t *blockmail, blockspec_t *bspec, xmlDocPtr doc, xmlN xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "blockspec", "->"); + log_idpush (blockmail -> lg, "blockspec"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "prefix")) { @@ -902,7 +855,7 @@ parse_type (blockmail_t *blockmail, mailtypedefinition_t *mtyp, xmlDocPtr doc, x char *ptr; st = true; - log_idpush (blockmail -> lg, "type", "->"); + log_idpush (blockmail -> lg, "type"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "blockspec")) { @@ -957,7 +910,7 @@ parse_types (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "types", "->"); + log_idpush (blockmail -> lg, "types"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "type")) { @@ -999,7 +952,7 @@ parse_layout (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ field_t *field; st = true; - log_idpush (blockmail -> lg, "layout", "->"); + log_idpush (blockmail -> lg, "layout"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! 
xmlstrcmp (node -> name, "element")) { @@ -1064,7 +1017,7 @@ parse_tag (blockmail_t *blockmail, tag_t **tbase, xmlDocPtr doc, xmlNodePtr base ; else prev = NULL; - log_idpush (blockmail -> lg, "tag", "->"); + log_idpush (blockmail -> lg, "tag"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "tag")) { @@ -1122,7 +1075,7 @@ parse_dyncont (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base, dyn_t *dy xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "content", "->"); + log_idpush (blockmail -> lg, "content"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "block")) { @@ -1154,7 +1107,7 @@ parse_dynamic (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base, st = true; *root = NULL; prv = NULL; - log_idpush (blockmail -> lg, "dynamic", "->"); + log_idpush (blockmail -> lg, "dynamic"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "dyncont")) { @@ -1217,7 +1170,7 @@ parse_dynamics (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ st = true; prv = NULL; - log_idpush (blockmail -> lg, "dynamics", "->"); + log_idpush (blockmail -> lg, "dynamics"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "dynamic")) { @@ -1271,7 +1224,7 @@ parse_urls (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "urls", "->"); + log_idpush (blockmail -> lg, "urls"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! 
xmlstrcmp (node -> name, "url")) { @@ -1316,7 +1269,7 @@ parse_details (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base, st = true; cur = NULL; - log_idpush (blockmail -> lg, "details", "->"); + log_idpush (blockmail -> lg, "details"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "record")) { @@ -1377,7 +1330,7 @@ parse_receivers (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ char *ptr; st = false; - log_idpush (blockmail -> lg, "receivers", "->"); + log_idpush (blockmail -> lg, "receivers"); if (rec = receiver_alloc (blockmail, blockmail -> field_count)) { st = true; for (node = base; node && st; node = node -> next) { @@ -1458,17 +1411,17 @@ parse_receivers (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ if (parse_details (blockmail, doc, node -> children, rec)) { st = true; rec -> dkim = blockmail -> dkim && sdkim_should_sign (blockmail -> dkim, rec) ? true : false; - log_idpush (blockmail -> lg, "create", "->"); + log_idpush (blockmail -> lg, "create"); st = create_output (blockmail, rec); if (blockmail -> eval) eval_done_match (blockmail -> eval); log_idpop (blockmail -> lg); if (st) { - log_idpush (blockmail -> lg, "write", "->"); - if (! blockmail_insync (blockmail, rec -> customer_id, rec -> mid, rec -> mailtype, rec -> chunks, bcccount)) { + log_idpush (blockmail -> lg, "write"); + if (! blockmail_insync (blockmail, rec, bcccount)) { st = (*blockmail -> output -> owrite) (blockmail -> outputdata, blockmail, rec); if (st) - st = blockmail_tosync (blockmail, rec -> customer_id, rec -> mid, rec -> mailtype, rec -> chunks, rec -> size, bcccount); + st = blockmail_tosync (blockmail, rec, bcccount); } log_idpop (blockmail -> lg); if (! 
st) @@ -1505,7 +1458,7 @@ parse_blockmail (blockmail_t *blockmail, xmlDocPtr doc, xmlNodePtr base) /*{{{*/ xmlNodePtr node; st = true; - log_idpush (blockmail -> lg, "blockmail", "->"); + log_idpush (blockmail -> lg, "blockmail"); for (node = base; node && st; node = node -> next) if (node -> type == XML_ELEMENT_NODE) { if (! xmlstrcmp (node -> name, "description")) diff --git a/backend/src/c/xmlback/replace.c b/backend/src/c/xmlback/replace.c index d138506d5..8f1ab5ab1 100644 --- a/backend/src/c/xmlback/replace.c +++ b/backend/src/c/xmlback/replace.c @@ -113,7 +113,11 @@ replace_tags (blockmail_t *blockmail, receiver_t *rec, block_t *block, { bool_t st; record_t *record; + xmlBufferPtr source; + tagpos_t **tagpos; + int tagpos_count; int sorted_size; + tagpos_t **sorted; const dyn_t *dyn, *root; int dyncount, dynused; long start, cur, next, end, len; @@ -129,33 +133,41 @@ replace_tags (blockmail_t *blockmail, receiver_t *rec, block_t *block, record = rec -> rvdata -> cur; if (! set_content (blockmail, rec, record)) st = false; - for (n = 0, sorted_size = 0; n < block -> tagpos_count; ++n) { - if ((block -> tagpos[n] -> type & TP_DYNAMIC) && block -> tagpos[n] -> tname && - (dyn = find_dynamic (blockmail, rec, block -> tagpos[n] -> tname)) && - dyn -> interest && (dyn -> interest_index != -1)) { - if (record -> isnull[dyn -> interest_index]) - block -> tagpos[n] -> sort_value = 0; - else - block -> tagpos[n] -> sort_value = xml2long (record -> data[dyn -> interest_index]); - block -> sorted[sorted_size++] = block -> tagpos[n]; - block -> tagpos[n] -> sort_enable = true; - } else - block -> tagpos[n] -> sort_enable = false; + source = block -> content; + tagpos = block -> tagpos; + tagpos_count = block -> tagpos_count; + if ((tagpos_count > 0) && (sorted = (tagpos_t **) malloc (sizeof (tagpos_t *) * tagpos_count))) { + for (n = 0, sorted_size = 0; n < tagpos_count; ++n) { + if ((tagpos[n] -> type & TP_DYNAMIC) && tagpos[n] -> tname && + (dyn = find_dynamic 
(blockmail, rec, tagpos[n] -> tname)) && + dyn -> interest && (dyn -> interest_index != -1)) { + if (record -> isnull[dyn -> interest_index]) + tagpos[n] -> sort_value = 0; + else + tagpos[n] -> sort_value = xml2long (record -> data[dyn -> interest_index]); + sorted[sorted_size++] = tagpos[n]; + tagpos[n] -> sort_enable = true; + } else + tagpos[n] -> sort_enable = false; + } + if (sorted_size > 1) + qsort (sorted, sorted_size, sizeof (sorted[0]), tp_compare); + } else { + sorted_size = 0; + sorted = NULL; } - if (sorted_size > 1) - qsort (block -> sorted, sorted_size, sizeof (block -> sorted[0]), tp_compare); dyncount = 0; dynused = 0; start = 0; proot = NULL; pprev = NULL; clear_output = false; - end = xmlBufferLength (block -> content); - content = xmlBufferContent (block -> content); + end = xmlBufferLength (source); + content = xmlBufferContent (source); xmlBufferEmpty (block -> in); for (cur = start, tidx = 0, sidx = 0; cur < end; ) { - if (tidx < block -> tagpos_count) { - tp = block -> tagpos[tidx++]; + if (tidx < tagpos_count) { + tp = tagpos[tidx++]; next = tp -> start; } else { tp = NULL; @@ -172,7 +184,7 @@ replace_tags (blockmail_t *blockmail, receiver_t *rec, block_t *block, ++dyncount; if (tp -> sort_enable) { if (sidx < sorted_size) - sp = block -> sorted[sidx++]; + sp = sorted[sidx++]; else sp = NULL; } else @@ -281,36 +293,7 @@ replace_tags (blockmail_t *blockmail, receiver_t *rec, block_t *block, } if (tag && (cont = tag_content (tag, blockmail, rec, & n)) && (n > 0)) { if (ispdf) { - int clen; - - while (n > 0) { - clen = xmlCharLength (*cont); - if (clen > 1) { - xmlBufferAdd (block -> in, cont, clen); - } else - switch (*cont) { - default: - xmlBufferAdd (block -> in, cont, clen); - break; - case '&': - xmlBufferCCat (block -> in, "&"); - break; - case '<': - xmlBufferCCat (block -> in, "<"); - break; - case '>': - xmlBufferCCat (block -> in, ">"); - break; - case '\'': - xmlBufferCCat (block -> in, "'"); - break; - case '"': - xmlBufferCCat 
(block -> in, """); - break; - } - cont += clen; - n -= clen; - } + entity_escape (block -> in, cont, n); } else if (replace) { individual_replace (replace, block -> in, cont, n); } else { @@ -327,6 +310,8 @@ replace_tags (blockmail_t *blockmail, receiver_t *rec, block_t *block, xmlBufferEmpty (block -> in); dynused = 0; } + if (sorted) + free (sorted); if ((level == 0) && (dyncount > 0) && (dynused == 0)) { /* have hit one empty text block */ if (rec -> media && rec -> media -> empty) { diff --git a/backend/src/c/xmlback/tflua.c b/backend/src/c/xmlback/tflua.c index e38610686..438bebb87 100644 --- a/backend/src/c/xmlback/tflua.c +++ b/backend/src/c/xmlback/tflua.c @@ -34,7 +34,6 @@ lua_setfield (il -> lua, -2, (nnn)); \ } while (0) -# if 0 static void stack_dump (lua_State *lua, const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); static void stack_dump (lua_State *lua, const char *fmt, ...) /*{{{*/ @@ -59,7 +58,6 @@ stack_dump (lua_State *lua, const char *fmt, ...) /*{{{*/ } va_end (par); }/*}}}*/ -# endif static void push_record_field (lua_State *lua, char type, bool_t isnull, xmlBufferPtr data) /*{{{*/ { @@ -132,6 +130,7 @@ fetch_value (lua_State *lua, const char *column) /*{{{*/ typedef struct { /*{{{*/ log_t *lg; + bool_t sandbox; blockmail_t *blockmail; receiver_t *rec; int last_customer_id; @@ -146,6 +145,31 @@ typedef struct { /*{{{*/ # define GET_IFLUA(xxx) ((iflua_t *) lua_touserdata ((xxx), lua_upvalueindex (1))) +static void +iflua_verror (iflua_t *il, const char *message, va_list par) /*{{{*/ +{ + buffer_t *b; + + if (message) + if (b = buffer_alloc (512)) { + buffer_vformat (b, message, par); + lua_pushlstring (il -> lua, (const char *) buffer_content (b), buffer_length (b)); + buffer_free (b); + } else + lua_pushstring (il -> lua, message); + lua_error (il -> lua); +}/*}}}*/ +static void iflua_error (iflua_t *il, const char *message, ...) 
__attribute__ ((format (printf, 2, 3))); +static void +iflua_error (iflua_t *il, const char *message, ...) /*{{{*/ +{ + va_list par; + + va_start (par, message); + iflua_verror (il, message, par); + va_end (par); +}/*}}}*/ + static inline int iflua_convert (lua_State *lua, const xchar_t *(*func) (xconv_t *, const xchar_t *, int, int *)) /*{{{*/ { @@ -427,19 +451,20 @@ static struct { /*{{{*/ const char *modname; const char *funcname; lua_CFunction func; + bool_t sandbox; /*}}}*/ } iflua_functab[] = { /*{{{*/ - { LUA_STRLIBNAME, "xlower", iflua_xlower }, - { LUA_STRLIBNAME, "xupper", iflua_xupper }, - { LUA_STRLIBNAME, "xcapitalize", iflua_xcapitalize }, - { LUA_STRLIBNAME, "like", iflua_like }, - { LUA_MULTILIBNAME, "get", iflua_multi_get }, - { LUA_MULTILIBNAME, "pos", iflua_multi_pos }, - { LUA_MULTILIBNAME, "count", iflua_multi_count }, - { LUA_AGNLIBNAME, "loglevel", iflua_loglevel }, - { LUA_AGNLIBNAME, "log", iflua_log }, - { LUA_AGNLIBNAME, "makeuid", iflua_makeuid }, - { LUA_AGNLIBNAME, "strmap", iflua_strmap } + { LUA_STRLIBNAME, "xlower", iflua_xlower, true }, + { LUA_STRLIBNAME, "xupper", iflua_xupper, true }, + { LUA_STRLIBNAME, "xcapitalize", iflua_xcapitalize, true }, + { LUA_STRLIBNAME, "like", iflua_like, true }, + { LUA_MULTILIBNAME, "get", iflua_multi_get, true }, + { LUA_MULTILIBNAME, "pos", iflua_multi_pos, true }, + { LUA_MULTILIBNAME, "count", iflua_multi_count, true }, + { LUA_AGNLIBNAME, "loglevel", iflua_loglevel, false }, + { LUA_AGNLIBNAME, "log", iflua_log, false }, + { LUA_AGNLIBNAME, "makeuid", iflua_makeuid, false }, + { LUA_AGNLIBNAME, "strmap", iflua_strmap, true } /*}}}*/ }; @@ -449,7 +474,8 @@ iflua_setup_functions (iflua_t *il) /*{{{*/ int n; for (n = 0; n < sizeof (iflua_functab) / sizeof (iflua_functab[0]); ++n) - alua_setup_function (il -> lua, iflua_functab[n].modname, iflua_functab[n].funcname, iflua_functab[n].func, il); + if ((! 
il -> sandbox) || iflua_functab[n].sandbox) + alua_setup_function (il -> lua, iflua_functab[n].modname, iflua_functab[n].funcname, iflua_functab[n].func, il); }/*}}}*/ static void iflua_setup_context (iflua_t *il) /*{{{*/ @@ -491,33 +517,36 @@ iflua_setup_context (iflua_t *il) /*{{{*/ } else lua_pushnil (il -> lua); lua_setfield (il -> lua, -2, "senddate"); + setsfield (il -> blockmail -> auto_url_prefix, "auto_url_prefix"); + setbfield (il -> blockmail -> gui, "gui"); setbfield (il -> blockmail -> anon, "anon"); setsfield (il -> blockmail -> selector, "selector"); - setbfield (il -> blockmail -> rdir_content_links, "rdir_content_links"); setxfield (il -> blockmail -> auto_url, "auto_url"); setxfield (il -> blockmail -> anon_url, "anon_url"); setifield (il -> blockmail -> blocknr, "blocknr"); setifield (il -> blockmail -> total_subscribers, "total_subscribers"); - setsfield (il -> blockmail -> domain ? il -> blockmail -> domain : il -> blockmail -> fqdn, "domain"); - setsfield (il -> blockmail -> nodename, "node"); - setsfield (il -> blockmail -> fqdn, "fqdn"); - lua_createtable (il -> lua, 0, 0); - for (v = il -> blockmail -> company_info; v; v = v -> next) { - if (v -> val) - setsfield (v -> val, v -> var); - else - setnfield (v -> var); - if ((v -> var[0] == '_') && v -> val) - for (n = 0; n < sizeof (mapper) / sizeof (mapper[0]); ++n) - if ((! mapper[n].value) && (! strcmp (mapper[n].key, v -> var))) - mapper[n].value = v -> val; + if (! il -> sandbox) { + setsfield (il -> blockmail -> domain ? il -> blockmail -> domain : il -> blockmail -> fqdn, "domain"); + setsfield (il -> blockmail -> nodename, "node"); + setsfield (il -> blockmail -> fqdn, "fqdn"); + lua_createtable (il -> lua, 0, 0); + for (v = il -> blockmail -> company_info; v; v = v -> next) { + if (v -> val) + setsfield (v -> val, v -> var); + else + setnfield (v -> var); + if ((v -> var[0] == '_') && v -> val) + for (n = 0; n < sizeof (mapper) / sizeof (mapper[0]); ++n) + if ((! 
mapper[n].value) && (! strcmp (mapper[n].key, v -> var))) + mapper[n].value = v -> val; + } + lua_setfield (il -> lua, -2, "info"); + for (n = 0; n < sizeof (mapper) / sizeof (mapper[0]); ++n) + if (mapper[n].value) + setsfield (mapper[n].value, mapper[n].map); + else + setnfield (mapper[n].map); } - lua_setfield (il -> lua, -2, "info"); - for (n = 0; n < sizeof (mapper) / sizeof (mapper[0]); ++n) - if (mapper[n].value) - setsfield (mapper[n].value, mapper[n].map); - else - setnfield (mapper[n].map); lua_setfield (il -> lua, LUA_REGISTRYINDEX, ID_CTX); }/*}}}*/ static int @@ -596,12 +625,13 @@ iflua_free (iflua_t *il) /*{{{*/ return NULL; }/*}}}*/ static iflua_t * -iflua_alloc (blockmail_t *blockmail) /*{{{*/ +iflua_alloc (blockmail_t *blockmail, bool_t sandbox) /*{{{*/ { iflua_t *il; if (il = (iflua_t *) malloc (sizeof (iflua_t))) { il -> lg = log_alloc (NULL, LOG_LUA, NULL); + il -> sandbox = sandbox; il -> blockmail = blockmail; il -> rec = NULL; il -> last_customer_id = -1; @@ -609,7 +639,7 @@ iflua_alloc (blockmail_t *blockmail) /*{{{*/ il -> last_base_block = NULL; il -> local = NULL; il -> source = NULL; - if (il -> lua = alua_alloc ()) { + if (il -> lua = alua_alloc (sandbox)) { iflua_setup_functions (il); iflua_setup_context (il); iflua_setup_customer (il); @@ -694,7 +724,7 @@ static void iflua_lgpush (iflua_t *il, const char *lid) /*{{{*/ { if (il && il -> lg && lid) - log_idpush (il -> lg, lid, "->"); + log_idpush (il -> lg, lid); }/*}}}*/ static void iflua_lgpop (iflua_t *il) /*{{{*/ @@ -714,7 +744,7 @@ tf_lua_alloc (const char *func, tag_t *tag, blockmail_t *blockmail) /*{{{*/ iflua_t *il; il = NULL; - if (tag -> value && (il = iflua_alloc (blockmail))) { + if (tag -> value && (il = iflua_alloc (blockmail, false))) { iflua_set_source (il, xmlBufferContent (tag -> value), xmlBufferLength (tag -> value)); iflua_lgpush (il, func); if (! 
alua_load (il -> lua, func, xmlBufferContent (tag -> value), xmlBufferLength (tag -> value))) { @@ -778,7 +808,7 @@ tf_lua_proc (void *ilp, const char *func, tag_t *tag, blockmail_t *blockmail, re /* Doit */ rc = lua_pcall (il -> lua, 3, 1, 0); if (lua_gettop (il -> lua) > 0) { - if ((rc == 0) && alua_isdate (il -> lua, -1)) { + if ((rc == LUA_OK) && alua_isdate (il -> lua, -1)) { alua_date_t *date; if (date = alua_todate (il -> lua, -1)) { @@ -793,17 +823,17 @@ tf_lua_proc (void *ilp, const char *func, tag_t *tag, blockmail_t *blockmail, re } else result = lua_tostring (il -> lua, -1); } else { - if (rc == 0) + if (rc == LUA_OK) rc = -1; result = "no (usable) result returned"; } - if (rc == 0) { + if (rc == LUA_OK) { if (result) xmlBufferCCat (tag -> value, result); } else log_out (blockmail -> lg, LV_WARNING, "Tag \"%s\" propagates error \"%s\"", tag -> cname, (result ? result : "*no message found*")); iflua_lgpop (il); - return rc == 0 ? true : false; + return rc == LUA_OK ? true : false; }/*}}}*/ # define EV_FUNC "__evaluate" @@ -1067,7 +1097,7 @@ ev_lua_alloc (blockmail_t *blockmail, const char *expression) /*{{{*/ { iflua_t *il; - if (il = iflua_alloc (blockmail)) { + if (il = iflua_alloc (blockmail, true)) { char *frame; int flen; @@ -1131,7 +1161,7 @@ ev_lua_vevaluate (void *ilp, receiver_t *rec, va_list par) /*{{{*/ iflua_push_context (il); iflua_push_customer (il); lrc = lua_pcall (il -> lua, 2, 1, 0); - if (lrc == 0) { + if (lrc == LUA_OK) { rc = 0; if (lua_gettop (il -> lua) > 0) switch (lua_type (il -> lua, -1)) { diff --git a/backend/src/c/xmlback/xmlback.c b/backend/src/c/xmlback/xmlback.c index 948d4b010..78661000b 100644 --- a/backend/src/c/xmlback/xmlback.c +++ b/backend/src/c/xmlback/xmlback.c @@ -131,7 +131,9 @@ main (int argc, char **argv) /*{{{*/ output_t *out; const char *outparm; const char *auto_url_prefix; + bool_t gui; bool_t anon; + bool_t anon_preserve_links; const char *selector; bool_t convert_to_entities; bool_t force_ecs_uid; @@ 
-151,7 +153,9 @@ main (int argc, char **argv) /*{{{*/ out = & output_table[1]; outparm = NULL; auto_url_prefix = NULL; + gui = false; anon = false; + anon_preserve_links = false; selector = NULL; convert_to_entities = false; force_ecs_uid = false; @@ -163,7 +167,8 @@ main (int argc, char **argv) /*{{{*/ xmlInitializePredefinedEntities (); xmlInitCharEncodingHandlers (); json_set_escape_slashes (0); - while ((n = getopt (argc, argv, "VDpqE:lru:as:egd:t:o:L:T:h")) != -1) + opterr = 0; + while ((n = getopt (argc, argv, "VDpqE:lru:UaAs:egd:t:o:L:T:h")) != -1) switch (n) { case 'V': # ifdef EMM_VERSION @@ -188,9 +193,15 @@ main (int argc, char **argv) /*{{{*/ case 'u': auto_url_prefix = optarg; break; + case 'U': + gui = true; + break; case 'a': anon = true; break; + case 'A': + anon_preserve_links = true; + break; case 's': selector = optarg; break; @@ -231,7 +242,6 @@ main (int argc, char **argv) /*{{{*/ pointintime = atol (optarg); break; case 'h': - default: fprintf (stderr, "Usage: %s [-h] [-V] [-L ] [-D] [-v] [-p] [-q] [-E ] [-l] [-r] [-d ] [-o [:] \n", argv[0]); fprintf (stderr, " further options: [-u ] [-a] [-s ] [-e] [-f]\n"); fputs ("Function: read and process XML files generated from database representation\n" @@ -246,6 +256,7 @@ main (int argc, char **argv) /*{{{*/ "\t-r raw output, do not encode generated mails (used by preview)\n" "\t-u use as prefix for generated auto urls\n" "\t-a anonymize the output as far as possible\n" + "\t-A preserve links when creating anon output\n" "\t-s selector to restrict usage of text blocks\n" "\t-e convert known special characters to its HTML entity\n" "\t-g force generation of extended click statistics UIDs\n" @@ -264,7 +275,10 @@ main (int argc, char **argv) /*{{{*/ fputs ("\n", stderr); } } - return n != 'h'; + return 0; + default: + /* silently ignore unknown options */ + break; } pparm = NULL; if (outparm && outparm[0] && (! 
(pparm = parse_parm (outparm)))) @@ -313,7 +327,8 @@ main (int argc, char **argv) /*{{{*/ log_idset (lg, "init"); blockmail -> outputdata = (*out -> oinit) (blockmail, pparm); blockmail_setup_auto_url_prefix (blockmail, auto_url_prefix); - blockmail_setup_anon (blockmail, anon); + blockmail -> gui = gui || auto_url_prefix ? true : false; + blockmail_setup_anon (blockmail, anon, anon_preserve_links); blockmail_setup_selector (blockmail, selector); blockmail -> force_ecs_uid = force_ecs_uid; blockmail -> convert_to_entities = convert_to_entities; @@ -323,9 +338,11 @@ main (int argc, char **argv) /*{{{*/ if (! blockmail -> outputdata) log_out (lg, LV_ERROR, "Unable to initialize output method %s for %s", out -> name, argv[n]); else { - doc = xmlReadFile (argv[n], NULL, XML_PARSE_NONET | XML_PARSE_NOCDATA | XML_PARSE_COMPACT | XML_PARSE_HUGE); + doc = xmlReadFile (argv[n], NULL, XML_PARSE_NOENT | XML_PARSE_PEDANTIC | XML_PARSE_NONET | XML_PARSE_NOCDATA | XML_PARSE_COMPACT | XML_PARSE_HUGE); if (doc) { - if (doc -> encoding) { + if (doc -> encoding && + strcasecmp (xml2char (doc -> encoding), "UTF-8") && + strcasecmp (xml2char (doc -> encoding), "UTF8")) { blockmail -> translate = xmlFindCharEncodingHandler (xml2char (doc -> encoding)); if (! (blockmail -> translate -> input || blockmail -> translate -> iconv_in || blockmail -> translate -> output || blockmail -> translate -> iconv_out)) { diff --git a/backend/src/c/xmlback/xmlback.h b/backend/src/c/xmlback/xmlback.h index 147768603..82d2daf33 100644 --- a/backend/src/c/xmlback/xmlback.h +++ b/backend/src/c/xmlback/xmlback.h @@ -111,6 +111,7 @@ # define REASON_NO_MEDIA 1 # define REASON_EMPTY_DOCUMENT 2 # define REASON_UNMATCHED_MEDIA 3 +# define REASON_CUSTOM 4 typedef enum { /*{{{*/ EncNone, @@ -221,7 +222,6 @@ struct block { /*{{{*/ buffer_t *bout; /* encoded binary content */ DO_DECL (tagpos); /* all tags with position in .. */ /* .. 
content */ - tagpos_t **sorted; /* for output sorted tagpos */ struct { buffer_t *source; /* source copy .. */ buffer_t *target; /* .. and target for .. */ @@ -406,12 +406,13 @@ struct blockmail { /*{{{*/ bool_t active; /* if user is active */ int reason; /* code, if user not active */ int reason_detail; /* specific reason, if available*/ + char *reason_custom; /* custom reason text */ buffer_t *head; /* the created head .. */ buffer_t *body; /* .. and body */ rblock_t *rblocks; /* the raw blocks */ /* - * from here, the data is from the input file or from dynamic enviroment + * from here, the data is from the input file or from dynamic environment */ /* description part */ map_t *smap; @@ -434,7 +435,9 @@ struct blockmail { /*{{{*/ xmlBufferPtr auto_url; bool_t auto_url_is_dynamic; char *auto_url_prefix; + bool_t gui; bool_t anon; + bool_t anon_preserve_links; char *selector; bool_t convert_to_entities; xmlBufferPtr onepixel_url; @@ -482,6 +485,8 @@ struct blockmail { /*{{{*/ int dynamic_count; xmlBufferPtr mtbuf[2]; + bool_t use_new_url_modification; + /* URLs in the mailing */ DO_DECL (url); DO_DECL (link_resolve); @@ -624,6 +629,7 @@ extern bool_t replace_tags (blockmail_t *blockmail, receiver_t *rec, block_t *b const char *selector, bool_t ishtml, bool_t ispdf); extern bool_t modify_urls (blockmail_t *blockmail, receiver_t *rec, block_t *block, protect_t *protect, bool_t ishtml, record_t *record); +extern bool_t modify_header (blockmail_t *blockmail, block_t *header); extern bool_t modify_output (blockmail_t *blockmail, receiver_t *rec, block_t *block, blockspec_t *bspec, links_t *links); extern int convert_block (xmlCharEncodingHandlerPtr translate, xmlBufferPtr in, xmlBufferPtr out, bool_t isoutput); extern bool_t convert_charset (blockmail_t *blockmail, block_t *block); @@ -642,6 +648,7 @@ extern void tagpos_find_name (tagpos_t *t); extern void tagpos_setup_tag (tagpos_t *t, blockmail_t *blockmail); extern block_t *block_alloc (void); extern block_t 
*block_free (block_t *b); +extern void block_swap_inout (block_t *b); extern bool_t block_setup_charset (block_t *b); extern void block_setup_tagpositions (block_t *b, blockmail_t *blockmail); extern void block_find_method (block_t *b); @@ -692,15 +699,15 @@ extern bool_t rblock_retrieve_content (rblock_t *r, buffer_t *content); extern bool_t rblock_set_string_content (rblock_t *r, const char *content); extern mailtrack_t *mailtrack_alloc (int licence_id, int company_id, int mailing_id, int maildrop_status_id); extern mailtrack_t *mailtrack_free (mailtrack_t *m); -extern void mailtrack_add (mailtrack_t *m, int customer_id); +extern void mailtrack_add (mailtrack_t *m, receiver_t *rec); extern blockmail_t *blockmail_alloc (const char *fname, bool_t syncfile, log_t *lg); extern blockmail_t *blockmail_free (blockmail_t *b); extern time_t blockmail_now (blockmail_t *b); extern bool_t blockmail_count (blockmail_t *b, const char *mediatype, int subtype, int chunks, long bytes, int bcccount); extern void blockmail_count_sort (blockmail_t *b); extern void blockmail_unsync (blockmail_t *b); -extern bool_t blockmail_insync (blockmail_t *b, int cid, const char *mediatype, int subtype, int chunks, int bcccount); -extern bool_t blockmail_tosync (blockmail_t *b, int cid, const char *mediatype, int subtype, int chunks, long size, int bcccount); +extern bool_t blockmail_insync (blockmail_t *b, receiver_t *rec, int bcccount); +extern bool_t blockmail_tosync (blockmail_t *b, receiver_t *rec, int bcccount); extern bool_t blockmail_extract_mediatypes (blockmail_t *b); extern void blockmail_setup_senddate (blockmail_t *b, const char *date, time_t epoch); extern void blockmail_setup_company_configuration (blockmail_t *b); @@ -711,7 +718,7 @@ extern void blockmail_setup_onepixel_template (blockmail_t *b); extern void blockmail_setup_tagpositions (blockmail_t *b); extern void blockmail_setup_offline_picture_prefix (blockmail_t *b); extern void blockmail_setup_auto_url_prefix (blockmail_t 
*b, const char *nprefix); -extern void blockmail_setup_anon (blockmail_t *b, bool_t anon); +extern void blockmail_setup_anon (blockmail_t *b, bool_t anon, bool_t anon_preserve_links); extern void blockmail_setup_selector (blockmail_t *b, const char *selector); extern void blockmail_setup_preevaluated_targets (blockmail_t *blockmail); @@ -846,6 +853,7 @@ extern const char *byte2char (const byte_t *b); extern int xmlstrcmp (const xmlChar *s1, const char *s2); extern int xmlstrncmp (const xmlChar *s1, const char *s2, size_t n); extern long xml2long (xmlBufferPtr p); +extern void entity_escape (xmlBufferPtr target, const xmlChar *source, int source_length); # else /* __OPTIMIZE__ */ # define I static inline # include "misc.c" diff --git a/backend/src/script/control/update.sh b/backend/src/script/control/update.sh index 1c0e13d16..7e2254d98 100644 --- a/backend/src/script/control/update.sh +++ b/backend/src/script/control/update.sh @@ -16,7 +16,7 @@ case "$1" in start) active update shift - starter $command -bw "$@" + starter $command -bw --termination-delay=5m "$@" ;; stop) softterm $command diff --git a/backend/src/script/lib/agn3/aioruntime.py b/backend/src/script/lib/agn3/aioruntime.py index 49a29076c..c92a5d26f 100644 --- a/backend/src/script/lib/agn3/aioruntime.py +++ b/backend/src/script/lib/agn3/aioruntime.py @@ -16,10 +16,11 @@ from collections import deque from dataclasses import dataclass, field from datetime import datetime +from io import StringIO from types import FrameType -from typing import Any, Awaitable, Callable, Generic, Optional, TypeVar, Union -from typing import Coroutine, Deque, Dict, List, Tuple, Type -from typing import cast +from typing import Any, Awaitable, Callable, Generic, Literal, Optional, Sequence, TypeVar, Union +from typing import Coroutine, Deque, Dict, List, Set, Tuple, Type +from typing import cast, overload from .exceptions import error, Timeout, Stop from .log import log from .ignore import Ignore @@ -50,13 +51,26 @@ def 
__init__ (self, maxsize: int = 0) -> None: def __len__ (self) -> int: return len (self.queue) + + async def _wake_putter (self) -> None: + if self.maxsize > 0 and self.putter and len (self.queue) < self.maxsize: + for future in self.putter: + if not future.done (): + future.set_result (None) + await asyncio.sleep (0) + + async def remove (self, value: _T) -> None: + with Ignore (ValueError): + while self.queue: + self.queue.remove (value) + await self._wake_putter () async def get (self, predicate: Optional[Callable[[_T], bool]] = None) -> _T: if self.queue: for element in self.queue: if predicate is None or predicate (element): self.queue.remove (element) - await asyncio.sleep (0) + await self._wake_putter () return element future: asyncio.Future[_T] = self.loop.create_future () entry: Queue.Entry[_T] = Queue.Entry (predicate = predicate, future = future) @@ -121,14 +135,10 @@ async def put (self, element: _T, rejected: bool = False, key: Optional[Callable self.queue.remove (element) break # - if self.maxsize > 0 and self.putter and len (self.queue) < self.maxsize: - for future in self.putter: - if not future.done (): - future.set_result (None) - await asyncio.sleep (0) + await self._wake_putter () class AIORuntime (Runtime): - __slots__ = ['_loop', '_stop', '_channels', '_tasks'] + __slots__ = ['_loop', '_stop', '_apply_delay', '_channels', '_tasks'] with Ignore (ImportError): import uvloop asyncio.set_event_loop_policy (uvloop.EventLoopPolicy ()) @@ -146,7 +156,12 @@ def __len__ (self) -> int: def delete (self) -> None: self.ref.delete_channel (self.name) - + + async def remove (self, value: _T) -> None: + await self.queue.remove (value) + if not self.queue: + self.event.clear () + async def get (self, predicate: Optional[Callable[[_T], bool]] = None) -> _T: rc = await self.queue.get (predicate = predicate) if not len (self.queue): @@ -165,9 +180,14 @@ def __aiter__ (self) -> AIORuntime.Channel[_T]: return self async def __anext__ (self) -> _T: - if not 
self.ref.running: - raise StopAsyncIteration () - return await self.get () + while self.ref.running: + done, pending = await asyncio.wait ([self.ref._stop, self.get ()], return_when = asyncio.FIRST_COMPLETED) + Stream (pending).filter (lambda f: f is not self.ref._stop).each (lambda f: f.cancel ()) + if self.ref._stop in done: + break + if done: + return cast (_T, await done.pop ()) + raise StopAsyncIteration () class Channels: __slots__ = ['ref', 'channels'] @@ -228,24 +248,58 @@ def __str__ (self) -> str: else: status = f'running {self.task.get_coro ()}' return f'{self.task.get_name ()}: {status}, started {self.started:%c}' - + __repr__ = __str__ - async def join (self) -> _T: + @overload + async def join (self, timeout: None = ..., silent: Literal[False] = ...) -> _T: ... + @overload + async def join (self, timeout: None = ..., silent: Literal[True] = ...) -> Optional[_T]: ... + @overload + async def join (self, timeout: Union[int, float] = ..., silent: bool = ...) -> Optional[_T]: ... 
+ async def join (self, timeout: Union[None, int, float] = None, silent: bool = False) -> Optional[_T]: try: - return await self.task + if timeout is None: + return await self.task + return await asyncio.wait_for (self.task, timeout) + except asyncio.exceptions.TimeoutError: + if not silent: + raise error (f'{self.name}: timeout') except asyncio.exceptions.CancelledError: - raise error (f'{self.name}: canceled') + if not silent: + raise error (f'{self.name}: canceled') + except Exception as e: + self._log_exception (logger.error, e) + if not silent: + raise error (f'{self.name}: exception {e}') + return None def cancel (self) -> None: - self.task.cancel () + if not self.task.done (): + self.task.cancel () + elif (e := self.task.exception ()) is not None: + logger.info (f'cancel {self.name}: task already terminated due to exception') + self._log_exception (logger.info, e) + elif self.task.cancelled (): + logger.debug (f'cancel {self.name}: task already cancelled') + def _log_exception (self, method: Callable[[str], None], e: BaseException) -> None: + method (f'{self.name}: {e}') + buffer = StringIO () + self.task.print_stack (file = buffer) + for line in buffer.getvalue ().strip ().split ('\n'): + method (line) + class Tasks: __slots__ = ['tasks'] def __init__ (self) -> None: self.tasks: Dict[str, AIORuntime.Task[Any]] = {} def task (self, name: str, coro: Coroutine[Any, Any, _T]) -> AIORuntime.Task[_T]: - self.tasks[name] = task = AIORuntime.Task (name, coro, asyncio.create_task (coro, name = name)) + task = AIORuntime.Task (name, coro, asyncio.create_task (coro, name = name)) + return self.add (name, task) + + def add (self, name: str, task: AIORuntime.Task[_T]) -> AIORuntime.Task[_T]: + self.tasks[name] = task def remover (task: asyncio.Task[_T]) -> None: with Ignore (KeyError): myself = self.tasks.pop (task.get_name ()) @@ -265,7 +319,7 @@ def cancel (self, name: str) -> None: logger.info (f'{task}: cancelled') def terminate (self) -> None: - Stream 
(self.tasks).gather ().each (lambda n: self.cancel (n)) + Stream (self.tasks).drain ().each (lambda n: self.cancel (n)) async def watch (self, timeout: Union[None, int, float] = None, only_exceptions: bool = False) -> Optional[AIORuntime.Task[Any]]: if self.tasks: @@ -305,6 +359,7 @@ async def execution () -> None: self.setsignal (signal.SIGUSR1, self.signal_handler_stats) self._loop = asyncio.get_running_loop () self._stop = self.future (type (None)) + self._apply_delay: Set[asyncio.Task[Any]] = set () await self.controller () self._tasks.terminate () myself = asyncio.current_task () @@ -326,7 +381,14 @@ def future (self, t: Type[_T]) -> asyncio.Future[_T]: return self._loop.create_future () async def delay (self, timeout: Union[int, float]) -> bool: - await asyncio.wait ([self._stop], timeout = timeout if timeout > 0.0 else 0) + if self.running: + task = asyncio.current_task () + apply_delay = task in self._apply_delay + done, _ = await asyncio.wait ([self._stop], timeout = timeout if timeout > 0.0 and apply_delay else 0) + if done: + self.running = False + elif not apply_delay and task is not None: + self._apply_delay.add (task) return self.running async def wait (self, *awaitables: Awaitable[_U], timeout: Union[None, int, float] = None) -> Tuple[Awaitable[_U], _U]: @@ -357,12 +419,70 @@ def task (self, name: str, coro: Coroutine[Any, Any, _T]) -> AIORuntime.Task[_T] def cancel_task (self, name: str) -> None: self._tasks.cancel (name) + async def wait_task (self, tasks: List[AIORuntime.Task[_U]], timeout: Union[None, int, float] = None) -> Tuple[Optional[AIORuntime.Task[_U]], Optional[_U]]: + with Ignore (Timeout): + awaitables = [cast (Awaitable[_U], _t.task) for _t in tasks] + aw, rc = await self.wait (*awaitables, timeout = timeout) + return (tasks[awaitables.index (aw)], rc) + return (None, None) + async def watch (self, timeout: Union[None, int, float] = None, only_exceptions: bool = False) -> Optional[AIORuntime.Task[Any]]: with Ignore (Timeout, Stop): 
return (await self.wait (self._tasks.watch (only_exceptions = only_exceptions), timeout = timeout))[1] return None + @dataclass + class Process: + name: str + args: Sequence[str] + future: asyncio.Future[None] + process: Optional[asyncio.subprocess.Process] = None + async def launch (self) -> AIORuntime.Process: + self.process = await asyncio.create_subprocess_exec ( + *self.args, + stdin = asyncio.subprocess.PIPE, + stdout = asyncio.subprocess.PIPE, + stderr = asyncio.subprocess.PIPE + ) + self.future.set_result (None) + return self + + async def launched (self) -> None: + await self.future + + async def wait (self) -> int: + await self.launched () + if self.process is not None: + return await self.process.wait () + raise error (f'{self}: not started') + + async def communicate (self, input: Optional[bytes] = None) -> Tuple[int, bytes, bytes]: + await self.launched () + if self.process is not None: + (out, err) = await self.process.communicate (input) + returncode = self.process.returncode + return (returncode if isinstance (returncode, int) else -1, out, err) + raise error (f'{self}: not started') + + async def communicate_text (self, + input: Optional[str] = None, + input_charset: str = 'UTF-8', + output_charset: str = 'UTF-8', + errors: Optional[str] = None + ) -> Tuple[int, str, str]: + (returncode, out, err) = await self.communicate (None if not input else input.encode (input_charset)) + if errors is not None: + return (returncode, '' if not out else out.decode (output_charset, errors = errors), '' if not err else err.decode (output_charset, errors = errors)) + else: + return (returncode, '' if not out else out.decode (output_charset), '' if not err else err.decode (output_charset)) + + async def process (self, name: str, *args: str) -> AIORuntime.Process: + process = AIORuntime.Process (name, args, self._loop.create_future ()) + coro = process.launch () + self._tasks.add (name, AIORuntime.Task (name, coro, asyncio.create_task (coro, name = name))) + await 
process.launched () + return process + def stop (self) -> None: if not self._stop.done (): self._stop.set_result (None) - diff --git a/backend/src/script/lib/agn3/cache.py b/backend/src/script/lib/agn3/cache.py index d5d30445e..08814203a 100644 --- a/backend/src/script/lib/agn3/cache.py +++ b/backend/src/script/lib/agn3/cache.py @@ -16,11 +16,11 @@ # __all__ = ['Cache'] # -K = TypeVar ('K') -V = TypeVar ('V') -E = TypeVar ('E') +_K = TypeVar ('_K') +_V = TypeVar ('_V') +_E = TypeVar ('_E') # -class Cache (Generic[K, V]): +class Cache (Generic[_K, _V]): """Generic caching this class provides a generic caching implementation with limitiation @@ -74,10 +74,10 @@ class Cache (Generic[K, V]): 0 """ __slots__ = ['limit', 'timeout', 'active', 'count', 'cache', 'cacheline', 'fill'] - class Entry (Generic[E]): + class Entry (Generic[_E]): """Represents a single caching entry""" __slots__ = ['created', 'value'] - def __init__ (self, value: E, active: bool) -> None: + def __init__ (self, value: _E, active: bool) -> None: if active: self.created = time.time () self.value = value @@ -100,11 +100,11 @@ def __init__ (self, limit: int = 0, timeout: Union[None, int, float, str] = None self.timeout: Union[float, int] = timeout if isinstance (timeout, float) else unit.parse (timeout, -1) self.active = self.timeout >= 0.0 self.count = 0 - self.cache: Dict[K, Cache.Entry[V]] = {} - self.cacheline: Deque[K] = collections.deque () - self.fill: Optional[Callable[[K], V]] = None + self.cache: Dict[_K, Cache.Entry[_V]] = {} + self.cacheline: Deque[_K] = collections.deque () + self.fill: Optional[Callable[[_K], _V]] = None - def __getitem__ (self, key: K) -> V: + def __getitem__ (self, key: _K) -> _V: try: e = self.cache[key] if self.active and not e.valid (time.time (), self.timeout): @@ -119,7 +119,7 @@ def __getitem__ (self, key: K) -> V: return value raise - def __setitem__ (self, key: K, value: V) -> None: + def __setitem__ (self, key: _K, value: _V) -> None: if key in self.cache: 
self.cacheline.remove (key) else: @@ -131,7 +131,7 @@ def __setitem__ (self, key: K, value: V) -> None: self.cache[key] = self.Entry (value, self.active) self.cacheline.append (key) - def __delitem__ (self, key: K) -> None: + def __delitem__ (self, key: _K) -> None: del self.cache[key] self.cacheline.remove (key) self.count -= 1 @@ -139,7 +139,7 @@ def __delitem__ (self, key: K) -> None: def __len__ (self) -> int: return len (self.cache) - def __contains__ (self, key: K) -> bool: + def __contains__ (self, key: _K) -> bool: return key in self.cache def reset (self) -> None: @@ -148,7 +148,7 @@ def reset (self) -> None: self.cache = {} self.cacheline = collections.deque () - def remove (self, key: K) -> None: + def remove (self, key: _K) -> None: """remove ``key'' from cache, if ``key'' is in cache""" if key in self.cache: del self[key] diff --git a/backend/src/script/lib/agn3/config.py b/backend/src/script/lib/agn3/config.py index 9f04014b3..1c66821a0 100644 --- a/backend/src/script/lib/agn3/config.py +++ b/backend/src/script/lib/agn3/config.py @@ -116,7 +116,7 @@ def setup_namespace (self, **kwargs: Any) -> None: - now: current date as time.struct_time - today: current date as datetime.datetime -This method also copies the enviroment of the process and any passed +This method also copies the environment of the process and any passed keyword argument to the namespace. 
To further populate you can access the namespace directly using the ``ns'' attribute.""" self.ns['now'] = time.localtime () @@ -548,7 +548,7 @@ def dbget (self, 'timestamp': datetime, 'binary': bytes } - rd = CSVReader (StringIO (value.strip ()), CSVDefault if dialect is None else dialect) + rd = CSVReader (StringIO (value.strip ()), dialect = CSVDefault if dialect is None else dialect) for (lineno, row) in enumerate (rd): if db is None: table = var.split ('.')[-1] @@ -722,7 +722,7 @@ def read (self, stream: Union[None, IO[Any], str] = None) -> None: cur[block] += f'{line}\n' elif line and self.comment_pattern.match (line) is None: mtch = self.section_pattern.match (line) - if not mtch is None: + if mtch is not None: section_name = mtch.group (1) mtch = self.include_pattern.match (section_name) sname: Optional[str] @@ -748,40 +748,36 @@ def read (self, stream: Union[None, IO[Any], str] = None) -> None: if include in self.sections: for (var, val) in self.sections[include].items (): cur[var] = val - else: - mtch = self.command_pattern.match (line) - if not mtch is None: - grps = mtch.groups () - command = grps[0] - param = grps[2] - if command == 'abort': - fd.close () - while len (fds) > 0: - fds.pop (0).close () + elif (mtch := self.command_pattern.match (line)) is not None: + grps = mtch.groups () + command = grps[0] + param = grps[2] + if command == 'abort': + fd.close () + while len (fds) > 0: + fds.pop (0).close () + fd = None + elif command == 'close': + fd.close () + if fds: + fd = fds.pop (0) + else: fd = None - elif command == 'close': - fd.close () - if fds: - fd = fds.pop (0) - else: - fd = None - elif command == 'include': - nfd = open (param, 'rt') - fds.insert (0, fd) - fd = nfd + elif command == 'include': + nfd = open (param, 'rt') + fds.insert (0, fd) + fd = nfd + elif (mtch := self.data_pattern.match (line)) is not None: + (var, val) = mtch.groups () + if val == '{': + block = var + cur[var] = '' else: - mtch = self.data_pattern.match (line) - if 
not mtch is None: - (var, val) = mtch.groups () - if val == '{': - block = var - cur[var] = '' - else: - if len (val) > 1 and val[0] in '\'"' and val[-1] == val[0]: - val = val[1:-1] - cur[var] = val - else: - raise error (f'Unparsable line: {line}') + if len (val) > 1 and val[0] in '\'"' and val[-1] == val[0]: + val = val[1:-1] + cur[var] = val + else: + raise error (f'Unparsable line: {line}') def write_xml (self, fname: str) -> None: """Write configuration as xml file""" diff --git a/backend/src/script/lib/agn3/crontab.py b/backend/src/script/lib/agn3/crontab.py index 3b0247699..7725ff33e 100644 --- a/backend/src/script/lib/agn3/crontab.py +++ b/backend/src/script/lib/agn3/crontab.py @@ -58,7 +58,7 @@ class Crontab (IDs): obsolete entries.""" __slots__ = ['superuser'] header = re.compile ('# (DO NOT EDIT THIS FILE - edit the master and reinstall|\\([^ ]+ installed on|\\(Cron version)') - enviroment = re.compile ('^[ \t]*[^= \t]+[ \t]*=') + environment = re.compile ('^[ \t]*[^= \t]+[ \t]*=') def __init__ (self) -> None: super ().__init__ () self.superuser = os.getuid () == 0 @@ -111,8 +111,8 @@ def update (self, try: if line.strip ().startswith ('#'): raise ValueError ('comment line') - if self.enviroment.match (line) is not None: - raise ValueError ('enviroment setting') + if self.environment.match (line) is not None: + raise ValueError ('environment setting') entry = Entry.parse (line) if not remove or os.path.basename (entry.program) not in remove: for p in parsed: diff --git a/backend/src/script/lib/agn3/daemon.py b/backend/src/script/lib/agn3/daemon.py index 64364130e..734f02c7c 100644 --- a/backend/src/script/lib/agn3/daemon.py +++ b/backend/src/script/lib/agn3/daemon.py @@ -13,7 +13,6 @@ import os, stat, errno, signal, fcntl import time, pickle, mmap, subprocess import multiprocessing, logging -from abc import abstractmethod from collections import deque from dataclasses import dataclass, field from datetime import datetime @@ -29,6 +28,7 @@ from .log 
import log from .parser import Parsable, unit from .stream import Stream +from .tools import abstract # __all__ = ['Signal', 'Timer', 'Daemonic', 'Watchdog', 'EWatchdog', 'Daemon'] # @@ -187,11 +187,6 @@ class Daemonic: level process control. In general this (or one of its subclasses) should be subclassed and extended for the process to implement.""" __slots__ = ['running', 'signals'] - try: - devnull = os.devnull - except AttributeError: - devnull = '/dev/null' - class Channel(Generic[_T]): __slots__ = ['_reader', '_writer'] def __init__ (self) -> None: @@ -295,7 +290,7 @@ def push_to_background (self) -> bool: for fd in 0, 1, 2: with Ignore (OSError): os.close (fd) - fd = os.open (Daemonic.devnull, os.O_RDWR) + fd = os.open (os.devnull, os.O_RDWR) if fd == 0: if fcntl.fcntl (fd, fcntl.F_DUPFD) == 1: fcntl.fcntl (fd, fcntl.F_DUPFD) @@ -680,7 +675,6 @@ class Job: __slots__ = ['name', 'method', 'args', 'output', 'heartbeat', 'watchdog', 'pid', 'last', 'incarnation', 'hb', 'killed_by_heartbeat'] class Restart (Exception): """Exception to be thrown to force a restart of the process""" - pass def __init__ (self, name: str, method: Callable[..., Any], @@ -725,7 +719,7 @@ def __call__ (self) -> int: try: if self.output is not None and self.watchdog is not None: try: - self.watchdog.redirect (Daemonic.devnull, self.output) + self.watchdog.redirect (os.devnull, self.output) except Exception as e: logger.error (f'Failed to establish redirection: {e}') rc = self.method (*self.args) @@ -949,7 +943,10 @@ def wrapper () -> Any: if n in (0, termination_delay): for job in joblist: self.term (cast (int, job.pid), signal.SIGKILL if n == 0 else signal.SIGTERM) - logger.info (f'Signaled job {job.name} to terminate') + logger.info ('Signaled job {name} to terminate {how}'.format ( + name = job.name, + how = 'gracefully' if n == 0 else 'forced' + )) n -= 1 time.sleep (1) self.teardown (done) @@ -969,21 +966,19 @@ def starter (*args: Any, **kwargs: Any) -> Any: # Methods to override # # 
Must implement as entry point for new process - @abstractmethod def run (self, *args: Any, **kwargs: Any) -> Any: """The entry point for the legacy ``start'' method""" + abstract () # # called once during startup def startup (self, jobs: List[Watchdog.Job]) -> None: """Is called after setup, but before any child process is started""" - pass # # called once for teardonw def teardown (self, done: List[Watchdog.Job]) -> None: """Is called after all child processs are terminated, but before the watchdog exits ``done'' is the list of jobs which are known to had terminated.""" - pass # # called once when ready to terminate def terminating (self, jobs: List[Watchdog.Job], done: List[Watchdog.Job]) -> None: @@ -991,22 +986,18 @@ def terminating (self, jobs: List[Watchdog.Job], done: List[Watchdog.Job]) -> No ``jobs'' are a list of still active children and ``done'' is a list of jobs already terminated.""" - pass # # called before every starting of a process def spawning (self, job: Watchdog.Job) -> None: """Is called after ``job'' had been started""" - pass # # called after every joining of a process def joining (self, job: Watchdog.Job, ec: Daemonic.Status) -> None: """Is called after ``job'' has terminated with ``ec'' exit condition""" - pass # # called in subprocess before invoking method def started (self, job: Watchdog.Job) -> None: """Is called before invoking ``job'' entry point""" - pass # # called in subprocess after method completed def ended (self, job: Watchdog.Job, rc: Any) -> Any: @@ -1167,5 +1158,4 @@ def start_with_watchdog (self, child_methods:List[Callable[..., Any]], run_in_ba def run (self, *args: Any, **kwargs: Any) -> Any: """Entry point for implemention""" - pass diff --git a/backend/src/script/lib/agn3/db.py b/backend/src/script/lib/agn3/db.py index 78db21385..b52e8f9a8 100644 --- a/backend/src/script/lib/agn3/db.py +++ b/backend/src/script/lib/agn3/db.py @@ -14,7 +14,7 @@ from datetime import datetime, timedelta from types import TracebackType from 
typing import Any, Callable, Iterable, Optional, Union -from typing import Dict, Iterator, List, Tuple, Type +from typing import Dict, Iterator, List, NamedTuple, Set, Tuple, Type from typing import cast from .dbdriver import DBDriver from .dbcore import T, Row, Core, Cursor @@ -403,7 +403,55 @@ def find_tablespace (self, tablespace: Optional[str], *args: str) -> Optional[st self.release (cursor) self._tablespace_cache[tablespace] = rc return rc + + class Column (NamedTuple): + name: str + datatype: str + + class Index (NamedTuple): + name: str + columns: Union[str, List[str]] + tablespace: Optional[str] = None + def update_layout (self, + table: str, + *, + columns: Optional[List[Column]] = None, + indexes: Optional[List[Index]] = None + ) -> None: + cursor = self.check_open_cursor () + if columns: + layout = self.layout (table, normalize = True) + available: Set[str] = {_l.name for _l in layout} if layout else set () + columns = Stream (columns).filter (lambda c: c.name.lower () not in available).list () + if columns: + cursor.execute ('ALTER TABLE {table} ADD ({columns})'.format ( + table = table, + columns = (Stream (columns) + .map (lambda c: f'{c.name} {c.datatype}') + .join (', ') + ) + )) + if indexes: + available = cursor.streamc ( + cursor.qselect ( + oracle = 'SELECT index_name FROM user_indexes WHERE lower(table_name) = lower(:table)', + mysql = 'SELECT index_name FROM information_schema.statistics WHERE lower(table_name) = lower(:table) AND table_schema=(SELECT SCHEMA())', + sqlite = 'SELECT name AS index_name FROM sqlite_master WHERE lower(tbl_name) = lower(:table) AND type = \'index\'' + ), { + 'table': table + } + ).map_to (str, lambda r: r.index_name.lower ()).set () + for index in indexes: + if index.name.lower () not in available: + tablespace = self.find_tablespace (index.tablespace) + cursor.execute ('CREATE INDEX {name} ON {table} ({columns}){tablespace}'.format ( + name = index.name, + table = table, + columns = index.columns if isinstance 
(index.columns, str) else ', '.join (index.columns), + tablespace = f' TABLESPACE {tablespace}' if tablespace else '' + )) + def scratch_request (self, name: Optional[str] = None, layout: Optional[str] = None, @@ -422,7 +470,7 @@ def scratch_request (self, ``select'' is a select statement to fill the scratch table. ``tablespace'' is the the tablespace where the table should be created in (Oracle ony), ``unique'' a host unique ID to avoid clashes in an -active/active enviroment and if ``reuse'' is True, then an existing +active/active environment and if ``reuse'' is True, then an existing table is reused by this process, otherwise it will scan for a non existing one.""" if name is None: diff --git a/backend/src/script/lib/agn3/dbapi.py b/backend/src/script/lib/agn3/dbapi.py index 992bc4300..e42f9b396 100644 --- a/backend/src/script/lib/agn3/dbapi.py +++ b/backend/src/script/lib/agn3/dbapi.py @@ -11,8 +11,9 @@ # from __future__ import annotations from types import ModuleType -from typing import Any, Callable, Literal, Optional, Union +from typing import Any, Callable, Literal, Optional, Protocol, Union from typing import Dict, List, Tuple, Type +from typing import cast class DBAPI: class Vendor (ModuleType): @@ -27,8 +28,8 @@ class Vendor (ModuleType): ROWID: Any paramstyle: Literal['qmark', 'numeric', 'named', 'format', 'pyformat'] class Error (Exception): ... - def connect (*args: Any, **kwargs: Any) -> DBAPI.Driver: ... - class Driver: + def connect (*args: Any, **kwargs: Any) -> DBAPI.Driver: return cast (DBAPI.Driver, None) + class Driver (Protocol): def commit (self) -> None: ... def rollback (self) -> None: ... def close (self) -> None: ... 
@@ -43,7 +44,7 @@ def create_aggregate (self, name: str, number_of_parameter: int, cls: Type[Any]) class DriverOracle (Driver): stmtcachesize: int autocommit: int - class Cursor: + class Cursor (Protocol): description: Optional[List[Tuple[str, Any, Optional[int], Optional[int], Optional[int], Optional[int], Optional[bool]]]] rowcount: int arraysize: int diff --git a/backend/src/script/lib/agn3/dbconfig.py b/backend/src/script/lib/agn3/dbconfig.py index 455ba6328..4f6382777 100644 --- a/backend/src/script/lib/agn3/dbconfig.py +++ b/backend/src/script/lib/agn3/dbconfig.py @@ -40,7 +40,7 @@ def __init__ (self, dbid: Optional[str], param: Optional[str] = None) -> None: self.dbid = dbid self.data: Dict[str, str] = {} if param is not None: - for elem in [_p.strip () for _p in param.split (',')]: + for elem in [_p.strip () for _p in param.split (', ')]: elem = elem.strip () parts = [_e.strip () for _e in elem.split ('=', 1)] if len (parts) == 2: diff --git a/backend/src/script/lib/agn3/dbcore.py b/backend/src/script/lib/agn3/dbcore.py index e3089596e..acd80fab5 100644 --- a/backend/src/script/lib/agn3/dbcore.py +++ b/backend/src/script/lib/agn3/dbcore.py @@ -10,8 +10,7 @@ #################################################################################################################################################################################################################################################################### # from __future__ import annotations -import base64, re, keyword, json -from abc import abstractmethod +import os, base64, re, keyword from collections import namedtuple, defaultdict from datetime import datetime from itertools import zip_longest @@ -27,6 +26,7 @@ from .ignore import Ignore from .io import expand_path, normalize_path from .stream import Stream +from .tools import abstract # __all__ = ['Row', 'Cursor', 'DBType', 'Core'] # @@ -446,40 +446,29 @@ def logging (self, log: Optional[Callable[[str], None]]) -> Core.LogState: def setup 
(self, cfg: DBConfig.DBRecord) -> None: """hook to add driver specific setup code""" if (connect_config := cfg ('py-connect')) is not None: - if connect_config.startswith ('@'): - with open (expand_path (connect_config[1:])) as fd: - connect_config = fd.read () try: - content = base64.b64decode (connect_config, validate = True).decode ('UTF-8') - except: - content = connect_config - try: - value = eval (content, { - 'driver': self.driver, - 'datetime': datetime, - 'expand': expand_path, - 'normalize': normalize_path, - 'syscfg': syscfg, - 'fqdn': fqdn, - 'program': program, - 'base': base, - 'home': home, - 'user': user - }) - except Exception as e: - try: - value = json.loads (Stream (content.split ('\n')) - .filter (lambda s: not s.lstrip ().startswith ('#')) - .join ('\n') - ) - except: - raise error (f'py-connect: "{content}" is no valid expiression: {e}') - - if value: - if isinstance (value, dict): + value = syscfg.as_config ( + connect_config, + namespace = { + 'driver': self.driver, + 'datetime': datetime, + 'expand': expand_path, + 'normalize': normalize_path, + 'syscfg': syscfg, + 'fqdn': fqdn, + 'program': program, + 'base': base, + 'home': home, + 'user': user + }, + path_namespace = os.environ.copy (), + path_base = os.path.dirname (os.path.abspath (DBConfig.default_config_path)) + ) + except error as e: + raise error (f'py-connect: failed to parse "{connect_config}": {e}') + else: + if value: self.connect_options.update (value) - else: - raise error (f'py-connect: "{content}" expected a dict, parsed to a {type (value)}') def log (self, message: str) -> None: if self.logger is not None: @@ -747,9 +736,9 @@ def streamc (self, *args: Any, **kwargs: Any) -> Stream[Row]: cursor.close () return Stream (result) - @abstractmethod def connect (self) -> None: """Establish a connection to the database""" + abstract () class Binary: """Generic wrapper to represent binaries""" diff --git a/backend/src/script/lib/agn3/email.py b/backend/src/script/lib/agn3/email.py 
index e0ba527d4..bb3c0ecce 100644 --- a/backend/src/script/lib/agn3/email.py +++ b/backend/src/script/lib/agn3/email.py @@ -37,10 +37,31 @@ from .stream import Stream from .uid import UID, UIDHandler # -__all__ = ['EMail', 'CSVEMail', 'StatusMail', 'ParseMessageID', 'ParseEMail', 'EMailValidator'] +__all__ = ['sendmail', 'EMail', 'CSVEMail', 'StatusMail', 'ParseMessageID', 'ParseEMail', 'EMailValidator'] # logger = logging.getLogger (__name__) # +class SentStatus (NamedTuple): + status: bool = False + return_code: int = 0 + command_output: str = '' + command_error: str = '' + +def sendmail (recipients: List[str], mail: Union[str, bytes], sender: Optional[str] = None) -> SentStatus: + """Send out mail by invoking MTA CLI""" + command = syscfg.sendmail ( + recipients = recipients, + sender = sender + ) + pp = subprocess.Popen (command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE) + (out, err) = pp.communicate (mail.encode ('UTF-8') if isinstance (mail, str) else mail) + return SentStatus ( + status = pp.returncode == 0, + return_code = pp.returncode, + command_output = out.decode ('UTF-8', errors = 'backslashreplace') if out else '', + command_error = err.decode ('UTF-8', errors = 'backslashreplace') if err else '' + ) + class EMail (IDs): """Create multipart E-Mails""" __slots__ = [ @@ -89,6 +110,13 @@ def as_string (msg: EmailMessage, unixfrom: bool) -> str: return method (unixfrom, policy = EMail.nofold_policy) except Exception: return method (unixfrom, policy = compat32) + @staticmethod + def as_bytes (msg: EmailMessage, unixfrom: bool) -> bytes: + as_string = EMail.as_string (msg, unixfrom) + try: + return as_string.encode ('ascii') + except UnicodeEncodeError: + return as_string.encode ('UTF-8') @staticmethod def sign (message: str, sender: Optional[str] = None, company_id: Optional[int] = None) -> str: @@ -283,20 +311,20 @@ def __finalize_header (self) -> Tuple[List[str], str]: avail_headers: Set[str] = set () for head in 
self.headers: (name, header) = self.__cleanup_header (head) - if name is not None and not name.startswith ('content-') and not name in ('mime-version', ): + if name is not None and not name.startswith ('content-') and name != 'mime-version': headers.append (header) avail_headers.add (name) - if not 'from' in avail_headers and self.sender: + if 'from' not in avail_headers and self.sender: headers.append (f'From: {self.sender}') for (hid, sid) in [('to', self.TO), ('cc', self.CC)]: - if not hid in avail_headers: + if hid not in avail_headers: recvs = [_r[1] for _r in self.receivers if _r[0] == sid] if recvs: headers.append ('{name}: {receivers}'.format ( name = hid.capitalize (), receivers = ', '.join (recvs) )) - if not 'subject' in avail_headers and self.subject: + if 'subject' not in avail_headers and self.subject: headers.append (f'Subject: {self.subject}') charset = self.charset if self.charset is not None else 'UTF-8' nheaders = [] @@ -369,9 +397,8 @@ def build_mail (self) -> str: del msg['MIME-Version'] return EMail.sign (EMail.as_string (root, False) + '\n', sender = self.sender, company_id = self.company_id) - def send_mail (self) -> Tuple[bool, int, str, str]: + def send_mail (self) -> SentStatus: """Build and send the mail""" - (status, returnCode, out, err) = (False, 0, None, None) mail = self.build_mail () mfrom: Optional[str] = None if self.mfrom is not None: @@ -379,16 +406,12 @@ def send_mail (self) -> Tuple[bool, int, str, str]: mfrom = self.mfrom elif self.sender: mfrom = parseaddr (self.sender)[1] - cmd = syscfg.sendmail ( - [parseaddr (_r[1])[1] for _r in self.receivers], + status = sendmail ( + recipients = [parseaddr (_r[1])[1] for _r in self.receivers], + mail = mail, sender = mfrom ) - pp = subprocess.Popen (cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, text = True, errors = 'backslashreplace') - (out, err) = pp.communicate (mail) - returnCode = pp.returncode - if returnCode == 0: - status = True - return 
(status, returnCode, out, err) + return status # EMail.force_encoding ('UTF-8', 'qp') # @@ -406,7 +429,7 @@ def __init__ (self, receivers: List[str], subject: Optional[str], text: Optional[str], - data: List[Union[List[Any], Tuple[Any]]], + data: List[Union[List[Any], Tuple[Any, ...]]], filename: Optional[str] = None, charset: Optional[str] = None, dialect: str = CSVDefault @@ -526,7 +549,7 @@ def add_logfile (self, pos = content.rfind ('\n') if pos > 0: content = content[:pos] - content += '\n...\n'; + content += '\n...\n' else: content = fd.read () else: diff --git a/backend/src/script/lib/agn3/emm/_ams.py b/backend/src/script/lib/agn3/emm/_ams.py index 7313fa38e..8994ae058 100644 --- a/backend/src/script/lib/agn3/emm/_ams.py +++ b/backend/src/script/lib/agn3/emm/_ams.py @@ -8,3 +8,8 @@ # You should have received a copy of the GNU Affero General Public License along with this program. If not, see . # # # #################################################################################################################################################################################################################################################################### +# +__all__ = ['AMS', 'AMSLock'] +# +class AMS: pass +from ._amsmock import AMSLock diff --git a/backend/src/script/lib/agn3/emm/bounce.py b/backend/src/script/lib/agn3/emm/bounce.py index fb51793f3..3f329cb29 100644 --- a/backend/src/script/lib/agn3/emm/bounce.py +++ b/backend/src/script/lib/agn3/emm/bounce.py @@ -39,6 +39,7 @@ class Bounce: bav_rule_legacy_path: Final[str] = os.path.join (base, 'lib', 'bav.rule') bounce_rule_table: Final[str] = 'bounce_rule_tbl' bounce_config_table: Final[str] = 'bounce_config_tbl' + ote_table: Final[str] = 'one_time_provider_tbl' name_conversion: Final[str] = 'conversion' name_company_info_conversion: Final[str] = 'bounce-conversion-parameter' epoch: Final[datetime] = datetime (1970, 1, 1) diff --git a/backend/src/script/lib/agn3/emm/config.py 
b/backend/src/script/lib/agn3/emm/config.py index d726981aa..b3f986e40 100644 --- a/backend/src/script/lib/agn3/emm/config.py +++ b/backend/src/script/lib/agn3/emm/config.py @@ -304,16 +304,28 @@ def update (self, class_name: str, name: str, values: Union[str, List[str]], des return self.write (class_name, name, listjoin (new_values), description, hostname) class EMMCompany (_Config): - __slots__ = ['keys', 'company_id', 'company_info'] + __slots__ = ['keys', 'company_id', 'company_info', 'enabled_cache'] class Value (NamedTuple): company_id: int name: str value: str + @dataclass + class Enable: + enabled: Dict[int, bool] = field (default_factory = dict) + default: bool = False + enabled0: bool = False + def __post_init__ (self) -> None: + self.enabled0 = self.enabled.get (0, self.default) + + def __call__ (self, company_id: int) -> bool: + return self.enabled.get (company_id, self.enabled0) + def __init__ (self, db: Optional[DB] = None, reread: Parsable = None, selection: Optional[Systemconfig.Selection] = None, keys: Optional[Sequence[str]] = None) -> None: super ().__init__ (db, reread, selection) self.keys = keys self.company_id: Optional[int] = None self.company_info: DefaultDict[int, Dict[str, str]] = defaultdict (dict) + self.enabled_cache: Dict[Tuple[str, bool], EMMCompany.Enable] = {} def __enter__ (self) -> EMMCompany: return self @@ -323,6 +335,7 @@ def __exit__ (self, exc_type: Optional[Type[BaseException]], exc_value: Optional def retrieve (self, db: DB) -> None: self.company_info.clear () + self.enabled_cache.clear () collect: DefaultDict[Tuple[int, str], Dict[Optional[str], str]] = defaultdict (dict) query = ( 'SELECT company_id, cname, cvalue, hostname ' @@ -395,6 +408,18 @@ def scan_all (self) -> Iterator[EMMCompany.Value]: for (key, value) in company_config.items (): yield EMMCompany.Value (company_id, key, value) + def enabled (self, key: str, default: bool = False) -> EMMCompany.Enable: + try: + rc = self.enabled_cache[(key, default)] + except 
KeyError: + rc = self.enabled_cache[(key, default)] = EMMCompany.Enable (Stream (self.scan_all ()) + .filter (lambda v: v.name == key) + .map (lambda v: (v.company_id, atob (v.value))) + .dict (), + default = default + ) + return rc + def write (self, company_id: int, name: str, @@ -545,7 +570,7 @@ def mailer_stream (self) -> Stream[EMMMailerset.Mailer]: def mailerset_stream (self) -> Stream[EMMMailerset.Mailerset]: self.check () return Stream (self.mailersets.values ()).sorted (lambda m: m.id) - + class Responsibility (_Config): def __init__ (self, *args: Any, **kws: Any) -> None: pass def __enter__ (self) -> Responsibility: return self diff --git a/backend/src/script/lib/agn3/emm/metafile.py b/backend/src/script/lib/agn3/emm/metafile.py index 95142ae46..f0dc4876f 100644 --- a/backend/src/script/lib/agn3/emm/metafile.py +++ b/backend/src/script/lib/agn3/emm/metafile.py @@ -55,18 +55,13 @@ def __init__ (self, path: str) -> None: self.__error ('Invalid format of input file') else: self.valid = True - n = parts[0].find ('-') - if n != -1: - try: - self.licence_id = int (parts[0][n + 1:]) - except ValueError: - self.licence_id = -1 - self.__error (f'Unparsable licence ID in "{parts[0]}" found') - else: + try: + (_, licence_expr) = parts[0].split ('-') + self.licence_id = int (licence_expr) + except ValueError: self.licence_id = licence - cinfo = parts[2].split ('-') try: - self.company_id = int (cinfo[0]) + self.company_id = int (parts[2].split ('-', 1)[0]) except ValueError: self.company_id = -1 self.__error (f'Unparseable company ID in "{parts[2]}" found') diff --git a/backend/src/script/lib/agn3/emm/statd.py b/backend/src/script/lib/agn3/emm/statd.py index b15063695..86178dc45 100644 --- a/backend/src/script/lib/agn3/emm/statd.py +++ b/backend/src/script/lib/agn3/emm/statd.py @@ -27,7 +27,7 @@ def list_incoming (self) -> List[str]: ... def list_outgoing (self, licence: int, server: str, claim_all: bool = False) -> List[str]: ... 
def stat_file (self, fname: str) -> str: ... def proxy (self, contents: List[str]) -> List[str]: ... -class AIO_StatdProxy (AIO_XMLRPCProtocol): +class AIO_StatdProxy (AIO_XMLRPCProtocol, Protocol): async def whoami (self) -> str: ... async def get_last_error (self) -> str: ... async def count_free (self) -> int: ... diff --git a/backend/src/script/lib/agn3/emm/timestamp.py b/backend/src/script/lib/agn3/emm/timestamp.py index 3746e6e5b..b1b240035 100644 --- a/backend/src/script/lib/agn3/emm/timestamp.py +++ b/backend/src/script/lib/agn3/emm/timestamp.py @@ -74,7 +74,7 @@ def __setup (self, db: Optional[DB]) -> None: count = self.db.querys ('SELECT count(*) FROM timestamp_tbl WHERE name = :name', self.parm) if count is None or not count[0]: rc = self.db.querys ('SELECT max(timestamp_id) + 1 FROM timestamp_tbl') - if not rc is None and not rc[0] is None: + if rc is not None and rc[0] is not None: tid = rc[0] else: tid = 1 @@ -135,7 +135,7 @@ def setup (self, db: Optional[DB] = None, timestamp: Any = None) -> None: if cast (DB, self.db).update ('UPDATE timestamp_tbl SET temp = :ts WHERE name = :name', parm) != 1: raise error ('Failed to setup timestamp for current time') rc = cast (DB, self.db).querys ('SELECT cur, temp FROM timestamp_tbl WHERE name = :name', self.parm) - if not rc is None: + if rc is not None: (self.lowmark, self.highmark) = rc cast (DB, self.db).sync () diff --git a/backend/src/script/lib/agn3/emm/types.py b/backend/src/script/lib/agn3/emm/types.py index 1277229bc..f743f1e06 100644 --- a/backend/src/script/lib/agn3/emm/types.py +++ b/backend/src/script/lib/agn3/emm/types.py @@ -14,6 +14,10 @@ # class MediaType (Enum): EMAIL = 0 + @classmethod + def valid (cls, mediatype: int) -> bool: + return mediatype in {_m.value for _m in cls.__members__.values ()} + class UserStatus (Enum): ACTIVE = 1 BOUNCE = 2 @@ -21,13 +25,25 @@ class UserStatus (Enum): OPTOUT = 4 WAITCONFIRM = 5 BLOCKLIST = 6 + SUSPEND = 7 @classmethod def find_status (cls, name: str) -> 
UserStatus: return cls.__members__[name.upper ()] class WorkStatus (Enum): + New = 'mailing.status.new' + Admin = 'mailing.status.admin' + Active = 'mailing.status.active' + Disable = 'mailing.status.disable' + Cancel = 'mailing.status.canceled' + CancelCopy = 'mailing.status.canceledAndCopied' + Edit = 'mailing.status.edit' + Generating = 'mailing.status.in-generation' Finished = 'mailing.status.generation-finished' + NoRecipient = 'mailing.status.norecipients' + Ready = 'mailing.status.ready' + Scheduled = 'mailing.status.scheduled' Sending = 'mailing.status.sending' Sent = 'mailing.status.sent' - CancelCopy = 'mailing.status.canceledAndCopied' + Test = 'mailing.status.test' diff --git a/backend/src/script/lib/agn3/io.py b/backend/src/script/lib/agn3/io.py index 9650b3dfd..28c3ec478 100644 --- a/backend/src/script/lib/agn3/io.py +++ b/backend/src/script/lib/agn3/io.py @@ -131,25 +131,30 @@ def normalize_path (path: str) -> str: """expand and normalize a filesystem path relative to home directory""" return expand_path (path, base_path = base) -def create_path (path: str, mode: int = 0o777) -> None: - """create a path and all missing elements""" - if not os.path.isdir (path): - try: - os.mkdir (path, mode) - except OSError as e: - if e.args[0] != errno.EEXIST: - if e.args[0] != errno.ENOENT: - raise error (f'Failed to create {path}: {e}') - elements = path.split (os.path.sep) - target = '' - for element in elements: - target += element - if target and not os.path.isdir (target): - try: - os.mkdir (target, mode) - except OSError as e: - raise error (f'Failed to create {path} at {target}: {e}') - target += os.path.sep +def create_path (path: str, mode: int = 0o777) -> bool: + """create a path and all missing elements + +returns ``False'' if ``path''' already exists and ``True'' if +``path'' had been created. On failure, an exception is raised. 
+""" + if os.path.isdir (path): + return False + try: + os.mkdir (path, mode) + except OSError as e: + if e.errno == errno.EEXIST or e.errno != errno.ENOENT: + raise error (f'failed to create already existing {path}: {e}') + elements = path.split (os.path.sep) + target = '' + for element in elements: + target += element + if target and not os.path.isdir (target): + try: + os.mkdir (target, mode) + except OSError as e: + raise error (f'failed to create {path} at {target}: {e}') + target += os.path.sep + return True class ArchiveDirectory: __slots__: List[str] = [] @@ -400,7 +405,7 @@ def file_access (path: str) -> Tuple[List[str], List[List[Any]]]: cpath = os.readlink (cpath) else: fpath = cpath - if not fpath is None and st[stat.ST_DEV] == device and st[stat.ST_INO] == inode: + if fpath is not None and st[stat.ST_DEV] == device and st[stat.ST_INO] == inode: rc.append (pid) seen[check] = True except OSError as e: @@ -676,7 +681,7 @@ def open (self, stream: Union[str, IO[Any]], mode: _modes, bom_charset: Optional def done (self) -> None: """cleanup resources""" - if not self.fd is None: + if self.fd is not None: if not self.foreign: self.fd.close () else: @@ -750,14 +755,17 @@ def __init__ (self, ``relaxed'' is True, errors are ignored otherwise raised (as implemented by the csv.DictWriter module)""" super ().__init__ (stream, mode, bom_charset, header) - self.writer = csv.DictWriter (self.fd, field_list, dialect = dialect, extrasaction = 'ignore' if relaxed else 'raise') + if self.fd is not None: + self.writer = csv.DictWriter (self.fd, field_list, dialect = dialect, extrasaction = 'ignore' if relaxed else 'raise') + else: + raise error (f'failed to open {stream}') class _CSVReader (CSVIO): __slots__ = ['reader'] - def __init__ (self, stream: Union[str, IO[Any]]) -> None: + def __init__ (self, stream: Union[str, IO[Any]], *, mode: _ReadModes) -> None: super ().__init__ () self.reader: Any = None - self.open (stream, 'r') + self.open (stream, mode) def __iter__ 
(self) -> Iterator[Tuple[Any, ...]]: return iter (self.reader) @@ -778,10 +786,10 @@ def stream (self) -> Stream[List[str]]: class CSVReader (_CSVReader): """Wrapper to read a CSV file""" __slots__: List[str] = [] - def __init__ (self, stream: Union[str, IO[Any]], dialect: str) -> None: + def __init__ (self, stream: Union[str, IO[Any]], *, dialect: str, mode: _ReadModes = 'r') -> None: """for meaning of ``stream'' and ``bom_charset''see CSVIO.open (), for availble ``dialect'' values see CSVIO.__doc__""" - super ().__init__ (stream) + super ().__init__ (stream, mode = mode) self.reader = csv.reader (cast (IO[Any], self.fd), dialect = dialect) class CSVDictReader (_CSVReader): @@ -791,6 +799,8 @@ def __init__ (self, stream: Union[str, IO[Any]], field_list: List[str], dialect: str, + *, + mode: _ReadModes = 'r', rest_key: Optional[str] = None, rest_value: Any = None ) -> None: @@ -798,16 +808,16 @@ def __init__ (self, CSVIO.open (), for availble ``dialect'' values see CSVIO.__doc__, ``field_list'', ``rest_key'' and ``rest_value'' are passed to csv.DictReader to fill the dictionary.""" - super ().__init__ (stream) + super ().__init__ (stream, mode = mode) self.reader = csv.DictReader (cast (IO[Any], self.fd), field_list, dialect = dialect, restkey = rest_key, restval = rest_value) class CSVAutoDictReader (_CSVReader): """Wrapper to a read a CSV file as dict, determinating the field list form the first header line of the CSV""" __slots__: List[str] = [] - def __init__ (self, stream: Union[str, IO[Any]], dialect: str) -> None: + def __init__ (self, stream: Union[str, IO[Any]], dialect: str, *, mode: _ReadModes = 'r') -> None: """for meaning of ``stream'' and ``bom_charset''see CSVIO.open (), for availble ``dialect'' values see CSVIO.__doc__""" - super ().__init__ (stream) + super ().__init__ (stream, mode = mode) header = next (csv.reader (cast (IO[Any], self.fd), dialect = dialect)) self.reader = csv.DictReader (cast (IO[Any], self.fd), header, dialect = dialect) @@ 
-816,9 +826,10 @@ class CSVNamedReader: def __init__ (self, stream: Union[str, IO[Any]], dialect: str, - *fields: Field + *fields: Field, + mode: _ReadModes = 'r' ) -> None: - self.csv = CSVReader (stream, dialect) + self.csv = CSVReader (stream, dialect = dialect, mode = mode) header = next (self.csv.reader) if header is not None: typ = cast (Type[Line], namedtuple ('record', header)) diff --git a/backend/src/script/lib/agn3/janitor.py b/backend/src/script/lib/agn3/janitor.py index c64536a39..785dd7a0f 100644 --- a/backend/src/script/lib/agn3/janitor.py +++ b/backend/src/script/lib/agn3/janitor.py @@ -11,13 +11,12 @@ # from __future__ import annotations import os, logging, time, datetime, stat -from abc import abstractmethod from typing import Callable, Optional from typing import List, NamedTuple, Pattern, Set, Tuple from .definitions import base from .io import which from .pattern import isnum -from .tools import sizefmt, call +from .tools import abstract, sizefmt, call # __all__ = ['Janitor'] # @@ -371,7 +370,7 @@ def run (self, doit: bool) -> None: self.execute () self.done () - @abstractmethod def execute (self) -> None: """Execute the cleanup, must be overwritten by subclass""" + abstract () diff --git a/backend/src/script/lib/agn3/log.py b/backend/src/script/lib/agn3/log.py index 28c9229e7..517c1dde7 100644 --- a/backend/src/script/lib/agn3/log.py +++ b/backend/src/script/lib/agn3/log.py @@ -212,7 +212,7 @@ def data_filename (self, name: str, epoch: Union[None, int, float] = None, ts: O name is part of the final filename""" return self.__make_filename (ts, epoch, name) - def append (self, s: str) -> None: + def append (self, s: Union[str, Tuple[str, ...], List[str]]) -> None: """Writes data to a logfile s may either be a string or a list or tuple containing strings. 
If it @@ -225,8 +225,8 @@ def append (self, s: str) -> None: if isinstance (s, str): fd.write (s) elif isinstance (s, list) or isinstance (s, tuple): - for l in s: - fd.write (l) + for line in s: + fd.write (line) else: fd.write (str (s) + '\n') fd.close () diff --git a/backend/src/script/lib/agn3/mta.py b/backend/src/script/lib/agn3/mta.py index 854d68072..4422ee2ce 100644 --- a/backend/src/script/lib/agn3/mta.py +++ b/backend/src/script/lib/agn3/mta.py @@ -18,7 +18,9 @@ from .log import log from .tools import call, listsplit # -__all__ = ['MTA'] +__all__ = [ + 'MTA' +] # logger = logging.getLogger (__name__) # @@ -75,12 +77,8 @@ def __getitem__ (self, key: str) -> str: def getlist (self, key: str) -> List[str]: """returns the value for ``key'' as list""" return list (listsplit (self[key])) - - def __call__ (self, path: str, **kwargs: str) -> bool: - """``path'' is the file to process -kwargs may contain other parameter required or optional used by specific -instances of mail creation""" + def make (self, path: str, **kwargs: str) -> List[str]: generate = [ f'account-logfile={base}/log/account.log', f'bounce-logfile={base}/log/extbounce.log', @@ -110,13 +108,20 @@ def __call__ (self, path: str, **kwargs: str) -> bool: 'queue-flush={count}'.format (count = kwargs.get ('flush_count', '2')), f'queue-flush-command={base}/bin/fqu.sh' ] - cmd = [ + return [ self.xmlback, '-l', '-o', 'generate:{generate}'.format (generate = ';'.join (generate)), '-L', log.get_loglevel (default = 'info'), path ] + + def __call__ (self, path: str, **kwargs: str) -> bool: + """``path'' is the file to process + +kwargs may contain other parameter required or optional used by specific +instances of mail creation""" + cmd = self.make (path, **kwargs) logger.debug (f'{cmd} starting') pp = subprocess.Popen (cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, text = True, errors = 'backslashreplace') (out, err) = pp.communicate (None) diff --git 
a/backend/src/script/lib/agn3/parser.py b/backend/src/script/lib/agn3/parser.py index 482459343..1ce03c30e 100644 --- a/backend/src/script/lib/agn3/parser.py +++ b/backend/src/script/lib/agn3/parser.py @@ -13,10 +13,12 @@ import re, time, csv from collections import namedtuple from datetime import datetime +from functools import partial from io import StringIO from itertools import takewhile, zip_longest +from urllib.parse import quote, unquote from typing import Any, Callable, Iterable, Optional, Protocol, Union -from typing import Dict, Iterator, List, NamedTuple, Tuple, Type +from typing import Dict, Iterator, List, NamedTuple, Set, Tuple, Type from typing import cast from .exceptions import error from .ignore import Ignore @@ -226,7 +228,7 @@ class Field (NamedTuple): name: str converter: Optional[Callable[[str], Any]] = None optional: bool = False - default: Optional[Callable[[], Any]] = None + default: Optional[Callable[[str], Any]] = None source: Optional[str] = None class Lineparser: @@ -273,13 +275,13 @@ def convert (f: Field, e: Optional[str]) -> Any: return f.converter (e) if f.converter is not None else e if not f.optional: raise error (f'{f.name}: missing value') - return f.default () if f.default is not None else None + return f.default (f.name) if f.default is not None else None return self.target_class (*tuple (convert (_f, _e) for (_f, _e) in zip_longest (self.fields, elements))) except Exception as e: raise error (f'{line}: failed to parse: {e}') def make (self, **kws: Any) -> Line: - return self.target_class (*tuple (kws.get (_f.name, _f.default () if _f.default is not None else None) for _f in self.fields)) + return self.target_class (*tuple (kws.get (_f.name, _f.default (_f.name) if _f.default is not None else None) for _f in self.fields)) def from_csv (self, source: Iterable[str], dialect: Union[None, str, csv.Dialect, Type[csv.Dialect]] = None, **kws: Any) -> Iterator[Line]: rd = csv.reader (source, dialect = dialect, **kws) if dialect is not 
None else csv.reader (source, **kws) @@ -293,6 +295,35 @@ def as_csv (self, source: List[Any], dialect: Union[None, str, csv.Dialect, Type self.scratch.flush () return self.scratch.getvalue () +class Coder: + class Logic (NamedTuple): + encode: Callable[[str], str] + decode: Callable[[str], str] + save_encode: Optional[Callable[[str], str]] = None + save_decode: Optional[Callable[[str], str]] = None + coders: Dict[str, Logic] = { + 'default': Logic ( + encode = quote, + decode = unquote, + save_decode = lambda s: unquote (s, errors = 'backslashreplace') + ) + } + @classmethod + def find (cls, name: str, save: bool = True) -> Tuple[Callable[[str], str], Callable[[str], str]]: + logic = cls.coders[name] + encode, decode = logic.encode, logic.decode + if save and (logic.save_encode or logic.save_decode): + def save_coder (coder: Callable[[str], str], save_coder: Callable[[str], str], s: str) -> str: + try: + return coder (s) + except: + return save_coder (s) + if logic.save_encode is not None: + encode = partial (save_coder, logic.encode, logic.save_encode) + if logic.save_decode is not None: + decode = partial (save_coder, logic.decode, logic.save_decode) + return (encode, decode) + class Tokenparser: """Parse a single line into name/value tokens @@ -307,11 +338,20 @@ class Tokenparser: to get distinct results. 
This way, the overhead is marginal as the line itself is only parsed once and processed only for the configured target fields.""" - __slots__ = ['fields', 'target_class'] + __slots__ = ['fields', 'fieldmap', 'target_class', 'post_encoder', 'pre_decoder'] def __init__ (self, *fields: Union[Field, str]) -> None: self.fields: List[Field] = [_f if isinstance (_f, Field) else Field (_f, lambda a: a) for _f in fields] + self.fieldmap: Dict[str, Field] = Stream (self.fields).map (lambda f: (f.name, f)).dict () self.target_class = cast (Type[Line], namedtuple ('Line', tuple (_f.name for _f in self.fields))) - + self.post_encoder: Optional[Callable[[str], str]] = None + self.pre_decoder: Optional[Callable[[str], str]] = None + + def set_pre_decoder (self, name: Optional[str] = None) -> None: + if name is not None: + self.post_encoder, self.pre_decoder = Coder.find (name) + else: + self.post_encoder, self.pre_decoder = None, None + def __call__ (self, line: Union[None, str, Dict[str, str]]) -> Line: if line is None: tokens: Dict[str, str] = {} @@ -326,12 +366,30 @@ def get (field: Field, tokens: Dict[str, str]) -> Any: except KeyError: if not field.optional: raise - return field.default () if field.default is not None else None + return field.default (field.name) if field.default is not None else None return self.target_class (*tuple (get (_f, tokens) for _f in self.fields)) def parse (self, line: str) -> Dict[str, str]: return (Stream (line.split ('\t')) .map (lambda l: tuple (_l.strip () for _l in l.split ('=', 1))) .filter (lambda kv: len (kv) == 2) + .map (lambda kv: kv if self.pre_decoder is None else (kv[0], self.pre_decoder (kv[1]))) .dict () ) + + def create (self, record: Dict[str, Any]) -> str: + seen: Set[str] = set () + use: Dict[str, Any] = (Stream (record.items ()) + .filter (lambda kv: kv[0] in self.fieldmap) + .peek (lambda kv: seen.add (kv[0])) + .dict () + ) + use.update (Stream (self.fields) + .filter (lambda f: f.name not in seen and not f.optional) + 
.map (lambda f: (f.name, f.default ('') if f.default is not None else '')) + .dict () + ) + return (Stream (use.items ()) + .map (lambda kv: '{k}={v}'.format (k = kv[0], v = str (kv[1]) if self.post_encoder is None else self.post_encoder (str (kv[1])))) + .join ('\t') + ) diff --git a/backend/src/script/lib/agn3/plugin.py b/backend/src/script/lib/agn3/plugin.py index 21103c9d3..764527ed5 100644 --- a/backend/src/script/lib/agn3/plugin.py +++ b/backend/src/script/lib/agn3/plugin.py @@ -749,9 +749,9 @@ def names_available () -> Generator[Optional[str], None, None]: if cb.path is not None: name += f' loaded from {cb.path}' if cb.calls == 0: - self.warning (f'registered method "{name}" is never called') + self.debug (f'registered method "{name}" is never called') else: - self.verbose (f'fregistered method "{name}" is called {cb.calls} times') + self.debug (f'registered method "{name}" is called {cb.calls} times') self.info ('Shuting down modules') for m in reversed (self.modules): self.info (f'Shuting down {m.name}') diff --git a/backend/src/script/lib/agn3/process.py b/backend/src/script/lib/agn3/process.py index aa98027f2..82917bd4e 100644 --- a/backend/src/script/lib/agn3/process.py +++ b/backend/src/script/lib/agn3/process.py @@ -142,10 +142,10 @@ def find (self, val: Union[int, str]) -> Processentry: line = fd.readline ().strip () if line != '': pid = int (line) - if pid == None: + if pid is None: with Ignore (ValueError): pid = int (val) - if pid == None: + if pid is None: raise error (f'Given paramter "{val!r}" cannot be mapped to a pid') match = None for p in self.table: diff --git a/backend/src/script/lib/agn3/rpc.py b/backend/src/script/lib/agn3/rpc.py index 7ce180da3..a3b163431 100644 --- a/backend/src/script/lib/agn3/rpc.py +++ b/backend/src/script/lib/agn3/rpc.py @@ -14,7 +14,6 @@ import socketserver, urllib.parse, base64 import xmlrpc.server, xmlrpc.client import aiohttp_xmlrpc.client -from datetime import datetime from threading import Thread from types
import FrameType, TracebackType from typing import Any, Callable, Iterable, Optional, Protocol, Union @@ -29,8 +28,6 @@ # logger = logging.getLogger (__name__) # -_marshallable = Union[None, bool, int, float, str, bytes, Tuple[Any, ...], List[Any], Dict[Any, Any], datetime] -# XMLRPCProxy = xmlrpc.client.ServerProxy XMLRPCError = xmlrpc.client.Error # @@ -251,7 +248,7 @@ def handle_error (self, request: Any, client_address: Union[Tuple[str, int], str request = request )) - def _dispatch (self, method: str, params: Iterable[_marshallable]) -> _marshallable: + def _dispatch (self, method: str, params: Iterable[Any]) -> Any: try: rc = super ()._dispatch (method, params) logger.debug (f'INVOKE {method} {params!r} = {rc!r}') diff --git a/backend/src/script/lib/agn3/runtime.py b/backend/src/script/lib/agn3/runtime.py index 09076fbf5..ced1b1af0 100644 --- a/backend/src/script/lib/agn3/runtime.py +++ b/backend/src/script/lib/agn3/runtime.py @@ -34,13 +34,13 @@ # logger = logging.getLogger (__name__) # -def _expand_inline (args: List[str], enviroment: Optional[Dict[str, str]] = None) -> List[str]: - """Expands element starting with an '@' from an enviroment variable or a file +def _expand_inline (args: List[str], environment: Optional[Dict[str, str]] = None) -> List[str]: + """Expands element starting with an '@' from an environment variable or a file >>> _expand_inline (['this', 'is', '@(sample)', 'test'], {}) Traceback (most recent call last): ... 
-agn3.exceptions.error: @(sample): specified enviroment not found +agn3.exceptions.error: @(sample): specified environment not found >>> _expand_inline (['this', 'is', '@(sample)', 'test'], {'sample': '-?\\n--target=destination\\nfile1\\nfile2\\n'}) ['this', 'is', '-?', '--target=destination', 'file1', 'file2', 'test'] >>> import json @@ -58,9 +58,9 @@ def _expand_inline (args: List[str], enviroment: Optional[Dict[str, str]] = None if arg.startswith ('@'): if arg.startswith ('@(') and arg.endswith (')'): try: - content = (enviroment if enviroment is not None else os.environ)[arg[2:-1]] + content = (environment if environment is not None else os.environ)[arg[2:-1]] except KeyError: - raise error (f'{arg}: specified enviroment not found') + raise error (f'{arg}: specified environment not found') else: try: with open (arg[1:]) as fd: diff --git a/backend/src/script/lib/agn3/sanity.py b/backend/src/script/lib/agn3/sanity.py index 343dc139b..51faf96ce 100644 --- a/backend/src/script/lib/agn3/sanity.py +++ b/backend/src/script/lib/agn3/sanity.py @@ -299,7 +299,7 @@ def postproc (area: str, *args: Any) -> None: virtual_env = os.environ.get ('VIRTUAL_ENV') def installer (module: str, update: bool = False) -> None: if virtual_env is None: - raise ImportError (f'{module}: not running in a virtual enviroment, missing modules are not installed on system installation') + raise ImportError (f'{module}: not running in a virtual environment, missing modules are not installed on system installation') if pip is None: raise ImportError (f'{module}: no command for installation found') n = call ([pip, '--quiet', '--no-input', '--exists-action', 'w', '--disable-pip-version-check', 'install'] + (['-U'] if update else []) + [module]) diff --git a/backend/src/script/lib/agn3/spool.py b/backend/src/script/lib/agn3/spool.py index 306c6ead7..e53169244 100644 --- a/backend/src/script/lib/agn3/spool.py +++ b/backend/src/script/lib/agn3/spool.py @@ -217,7 +217,7 @@ def __iter__ (self) -> 
Generator[Mailspool.Workspace, None, None]: def check_path (self, path: str) -> bool: """Check and create missing ``path''""" - if path and not path in self.path_checked: + if path and path not in self.path_checked: try: if not os.path.isdir (path): create_path (path, self.mode) diff --git a/backend/src/script/lib/agn3/stream.py b/backend/src/script/lib/agn3/stream.py index d934418f2..ad12b946b 100644 --- a/backend/src/script/lib/agn3/stream.py +++ b/backend/src/script/lib/agn3/stream.py @@ -16,28 +16,28 @@ from itertools import filterfalse, dropwhile, takewhile, islice, chain from types import TracebackType from typing import Any, Callable, Optional, Reversible, Sized, TypeVar, Union -from typing import DefaultDict, Dict, Generic, List, Match, Pattern, Set, Tuple, Type +from typing import DefaultDict, Deque, Dict, Generic, List, Match, Pattern, Set, Tuple, Type from typing import cast, overload import typing from .exceptions import error # __all__ = ['Stream'] # -T = TypeVar ('T') -O = TypeVar ('O') +_T = TypeVar ('_T') +_O = TypeVar ('_O') # -class Stream (Generic[T]): +class Stream (Generic[_T]): """Stream implementation as inspired by Java 1.8 Original based on pystreams but as this project seems to be abandoned a subset of these methods are implemented here by giving up parallel execution at all.""" __slots__ = ['iterator'] - __sentinel = cast (T, object ()) + __sentinel = cast (_T, object ()) @classmethod - def defer (cls, obj: Iterable[T], defer: Optional[Callable[[Iterable[T]], None]] = None) -> Stream[T]: + def defer (cls, obj: Iterable[_T], defer: Optional[Callable[[Iterable[_T]], None]] = None) -> Stream[_T]: """Create a stream from an iterable ``obj'' and defer cleanup to the end""" - def provider (obj: Iterable[T], defer: Optional[Callable[[Iterable[T]], None]]) -> Iterator[T]: + def provider (obj: Iterable[_T], defer: Optional[Callable[[Iterable[_T]], None]]) -> Iterator[_T]: try: for elem in obj: yield elem @@ -49,17 +49,17 @@ def provider (obj: 
Iterable[T], defer: Optional[Callable[[Iterable[T]], None]]) return cls (provider (obj, defer)) @classmethod - def concat (cls, *args: Iterable[T]) -> Stream[T]: - def concater (args: Iterable[Iterable[T]]) -> Iterator[T]: + def concat (cls, *args: Iterable[_T]) -> Stream[_T]: + def concater (args: Iterable[Iterable[_T]]) -> Iterator[_T]: for element in args: for subelement in element: yield subelement return cls (concater (args)) @classmethod - def merge (cls, *args: Union[T, Iterable[T]]) -> Stream[T]: + def merge (cls, *args: Union[_T, Iterable[_T]]) -> Stream[_T]: """Like concat, but use items which are not iterable as literal to the target stream""" - def merger (args: Tuple[Union[Iterable[T], T], ...]) -> Iterator[T]: + def merger (args: Tuple[Union[Iterable[_T], _T], ...]) -> Iterator[_T]: for element in args: if isinstance (element, Iterable): for subelement in element: @@ -139,7 +139,7 @@ def multichainer (*args: Any, **kwargs: Any) -> Any: return value return multichainer - def __init__ (self, iterator: Iterable[T]) -> None: + def __init__ (self, iterator: Iterable[_T]) -> None: self.iterator = iter (iterator) def __str__ (self) -> str: @@ -151,42 +151,42 @@ def __repr__ (self) -> str: def __len__ (self) -> int: return self.count () - def __iter__ (self) -> Iterator[T]: + def __iter__ (self) -> Iterator[_T]: return self.iterator - def __reversed__ (self) -> Iterator[T]: + def __reversed__ (self) -> Iterator[_T]: try: - return reversed (cast (Reversible[T], self.iterator)) + return reversed (cast (Reversible[_T], self.iterator)) except TypeError: return reversed (list (self.iterator)) - def __enter__ (self) -> Stream[T]: + def __enter__ (self) -> Stream[_T]: return self def __exit__ (self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> Optional[bool]: return None - def __contains__ (self, o: T) -> bool: + def __contains__ (self, o: _T) -> bool: return sum ((1 for _o in self.iterator if _o 
== o)) > 0 - def new (self, iterator: Iterable[O]) -> Stream[O]: + def new (self, iterator: Iterable[_O]) -> Stream[_O]: """Create a new stream using ``iterator''""" - return cast (Type[Stream[O]], self.__class__) (iterator) + return cast (Type[Stream[_O]], self.__class__) (iterator) # # Intermediates # - def filter (self, predicate: Callable[[T], bool]) -> Stream[T]: + def filter (self, predicate: Callable[[_T], bool]) -> Stream[_T]: """Create a new stream for each element ``predicate'' returns True""" return self.new (filter (predicate, self.iterator)) - def exclude (self, predicate: Callable[[T], bool]) -> Stream[T]: + def exclude (self, predicate: Callable[[_T], bool]) -> Stream[_T]: """Create a new stream excluding each element ``prdicate'' returns True""" return self.new (filterfalse (predicate, self.iterator)) def error (self, - predicate: Callable[[T], bool], - exception: Union[None, str, Exception, Callable[[T], Exception]] = None - ) -> Stream[T]: + predicate: Callable[[_T], bool], + exception: Union[None, str, Exception, Callable[[_T], Exception]] = None + ) -> Stream[_T]: """Raise an error, if ``predicate'' returns False""" def check_for_error (e: Any) -> bool: if predicate (e): @@ -206,33 +206,33 @@ def check_for_error (e: Any) -> bool: def regexp (self, pattern: Union[str, Pattern[str]], flags: int = 0, - key: Optional[Callable[[T], str]] = None, - predicate: Optional[Callable[[Pattern[str], Match[str], T], T]] = None - ) -> Stream[T]: + key: Optional[Callable[[_T], str]] = None, + predicate: Optional[Callable[[Pattern[str], Match[str], _T], _T]] = None + ) -> Stream[_T]: """Create a new stream for each element matching regular expression ``pattern''. ``flags'' is passed to re.compile. 
If ``predicate'' is not None, this must be a callable which accepts three arguments, the compiled regular expression, the regular expression matching object and the element itself.""" expression = re.compile (pattern, flags) if isinstance (pattern, str) else pattern - def regexper () -> Iterator[T]: + def regexper () -> Iterator[_T]: for elem in self.iterator: m = expression.match (key (elem) if key is not None else str (elem)) if m is not None: yield predicate (expression, m, elem) if predicate is not None else elem return self.new (regexper ()) - def map (self, predicate: Callable[[T], O]) -> Stream[O]: + def map (self, predicate: Callable[[_T], _O]) -> Stream[_O]: """Create a new stream for each element mapped with ``predicate''""" return self.new ((predicate (_v) for _v in self.iterator)) - def map_to (self, t: Type[O], predicate: Callable[[T], Any]) -> Stream[O]: + def map_to (self, t: Type[_O], predicate: Callable[[_T], Any]) -> Stream[_O]: """Like map, but passing a type as a hint for the return type of predicate for type checking""" - return self.new ((cast (O, predicate (_v)) for _v in self.iterator)) + return self.new ((cast (_O, predicate (_v)) for _v in self.iterator)) - def distinct (self, key: Optional[Callable[[T], Any]] = None) -> Stream[T]: + def distinct (self, key: Optional[Callable[[_T], Any]] = None) -> Stream[_T]: """Create a new stream eleminating duplicate elements. 
If ``key'' is not None, it is used to build the key for checking identical elements""" - def distincter () -> Iterator[T]: + def distincter () -> Iterator[_T]: seen: Set[Any] = set () for elem in self.iterator: keyvalue = key (elem) if key is not None else elem @@ -241,19 +241,22 @@ def distincter () -> Iterator[T]: yield elem return self.new (distincter ()) - def gather (self) -> Stream[T]: - """Gather all input elemnts into a list before further processing, useful if the source is modified i a later stage of the stream""" + def drain (self) -> Stream[_T]: + """Drain the input stream + +i.e. collect all input elements into a list before further processing, +useful if the source is modified during a later stage of the stream""" return self.new (list (self.iterator)) - def sorted (self, key: Optional[Callable[[T], Any]] = None, reverse: bool = False) -> Stream[T]: + def sorted (self, key: Optional[Callable[[_T], Any]] = None, reverse: bool = False) -> Stream[_T]: """Create a new stream with sorted elements ``key'' and ``reverse'' are passed to sorted()""" return self.new (sorted (cast (Iterable, self.iterator), key = key, reverse = reverse)) - def reversed (self) -> Stream[T]: + def reversed (self) -> Stream[_T]: """Create a new stream in reverse order""" return self.new (reversed (self)) - def peek (self, predicate: Union[None, str, Callable[[T], Any]] = None) -> Stream[T]: + def peek (self, predicate: Union[None, str, Callable[[_T], Any]] = None) -> Stream[_T]: """Create a new stream while executing ``predicate'' for each element If predicate is None or a string, then each object is printed to @@ -297,14 +300,14 @@ def final (self, count: int) -> None: progress = p return progress - def progress (self, p: Union[str, Stream.Progress], checkpoint: Optional[int] = None) -> Stream[T]: + def progress (self, p: Union[str, Stream.Progress], checkpoint: Optional[int] = None) -> Stream[_T]: """Create a new stream which copies the stream calling the instance of ``p'' (an
instance of Stream.Progress or a string) on each iteration. If ``p'' is a string, then ``checkpoint'' is an optional integer value which specifies in which intervals the a progression messsages is emitted'""" progress = self.__progress (p, checkpoint) - def progressor () -> Iterator[T]: + def progressor () -> Iterator[_T]: count = 0 for elem in self.iterator: count += 1 @@ -314,21 +317,21 @@ def progressor () -> Iterator[T]: return self.new (progressor ()) def __functions (self, - condition: Callable[[T], bool], - modifier: Callable[[T], O], - *args: Union[Callable[[T], bool], Callable[[T], O]] - ) -> Tuple[List[Tuple[Callable[[T], bool], Callable[[T], O]]], Optional[Callable[[T], O]]]: - conditions: List[Tuple[Callable[[T], bool], Callable[[T], O]]] = [(condition, modifier)] + condition: Callable[[_T], bool], + modifier: Callable[[_T], _O], + *args: Union[Callable[[_T], bool], Callable[[_T], _O]] + ) -> Tuple[List[Tuple[Callable[[_T], bool], Callable[[_T], _O]]], Optional[Callable[[_T], _O]]]: + conditions: List[Tuple[Callable[[_T], bool], Callable[[_T], _O]]] = [(condition, modifier)] while len (args) > 1: - conditions.append ((cast (Callable[[T], bool], args[0]), cast (Callable[[T], O], args[1]))) + conditions.append ((cast (Callable[[_T], bool], args[0]), cast (Callable[[_T], _O], args[1]))) args = args[2:] - return (conditions, cast (Callable[[T], O], args[0]) if args else None) + return (conditions, cast (Callable[[_T], _O], args[0]) if args else None) def switch (self, - condition: Callable[[T], bool], - modifier: Callable[[T], O], - *args: Union[Callable[[T], bool], Callable[[T], O]] - ) -> Stream[O]: + condition: Callable[[_T], bool], + modifier: Callable[[_T], _O], + *args: Union[Callable[[_T], bool], Callable[[_T], _O]] + ) -> Stream[_O]: """Create a new stream for using mulitple condition/mapping pairs. 
If an odd number of arguments are passed, the last one is considered @@ -346,41 +349,41 @@ def switcher (elem: Any) -> Any: return default (elem) if default else elem return self.new ((switcher (_e) for _e in self.iterator)) - def snap (self, target: List[T]) -> Stream[T]: + def snap (self, target: List[_T]) -> Stream[_T]: """Create a new stream saving each element in ``target'' (which must provide an append method)""" return self.peek (lambda v: target.append (v)) - def dropwhile (self, predicate: Callable[[T], bool]) -> Stream[T]: + def dropwhile (self, predicate: Callable[[_T], bool]) -> Stream[_T]: """Create a new stream ignore all elements where ``predicate'' returns False up to first match""" return self.new (dropwhile (predicate, self.iterator)) - def takewhile (self, predicate: Callable[[T], bool]) -> Stream[T]: + def takewhile (self, predicate: Callable[[_T], bool]) -> Stream[_T]: """Create a new stream as long as ``predicate'' returns the first time False""" return self.new (takewhile (predicate, self.iterator)) - def limit (self, size: Optional[int]) -> Stream[T]: + def limit (self, size: Optional[int]) -> Stream[_T]: """Create a new stream with a maximum of ``size'' elements""" if size is None: return self.new (self.iterator) return self.new (islice (self.iterator, 0, size)) - def skip (self, size: Optional[int]) -> Stream[T]: + def skip (self, size: Optional[int]) -> Stream[_T]: """Create a new stream where the first ``size'' elements are skipped""" if size is None: return self.new (self.iterator) return self.new (islice (self.iterator, size, None)) - def remain (self, size: Optional[int]) -> Stream[T]: + def remain (self, size: Optional[int]) -> Stream[_T]: """Create a new stream which contains the remaining ``size'' elements""" if size is None: return self.new (self.iterator) return self.new (deque (self.iterator, maxlen = size)) - def slice (self, *args: int) -> Stream[T]: + def slice (self, *args: int) -> Stream[_T]: """Create a new stream 
selecting slice(*``args'')""" return self.new (islice (self.iterator, *args)) - def chain (self, t: Type[O]) -> Stream[O]: + def chain (self, t: Type[_O]) -> Stream[_O]: """Create a new stream flatten the elements of the stream.""" return self.new (chain.from_iterable (self.iterator)) # @@ -390,7 +393,7 @@ def dismiss (self) -> None: """Dismiss all elements to terminate the stream""" deque (self.iterator, maxlen = 0) - def reduce (self, predicate: Callable[[T, T], T], identity: T = __sentinel) -> T: + def reduce (self, predicate: Callable[[_T, _T], _T], identity: _T = __sentinel) -> _T: """Reduce the stream by applying ``predicate''. If ``identity'' is available, use this as the initial value""" if identity is self.__sentinel: return reduce (predicate, self.iterator) @@ -401,7 +404,7 @@ def __checkNo (self, no: Any, where: str) -> Any: raise ValueError (f'no value available for Stream.{where}: empty result set') return no - def __position (self, finisher: Optional[Callable[[T], Any]], no: Any, position: Callable[[int], int], where: str) -> Any: + def __position (self, finisher: Optional[Callable[[_T], Any]], no: Any, position: Callable[[int], int], where: str) -> Any: collect: DefaultDict[Any, int] = defaultdict (int) for elem in self.iterator: collect[elem] += 1 @@ -411,14 +414,14 @@ def __position (self, finisher: Optional[Callable[[T], Any]], no: Any, position: return self.__checkNo (no, where) @overload - def first (self, finisher: None = ..., consume: bool = ..., no: T = ...) -> T: ... + def first (self, finisher: None = ..., consume: bool = ..., no: _T = ...) -> _T: ... @overload def first (self, finisher: None = ..., consume: bool = ..., no: Any = ...) -> Any: ... @overload - def first (self, finisher: Callable[[T], T], consume: bool = ..., no: T = ...) -> T: ... + def first (self, finisher: Callable[[_T], _T], consume: bool = ..., no: _T = ...) -> _T: ... @overload - def first (self, finisher: Callable[[T], O], consume: bool = ..., no: O = ...) -> O: ... 
- def first (self, finisher: Optional[Callable[[T], Any]] = None, consume: bool = True, no: Any = __sentinel) -> Any: + def first (self, finisher: Callable[[_T], _O], consume: bool = ..., no: _O = ...) -> _O: ... + def first (self, finisher: Optional[Callable[[_T], Any]] = None, consume: bool = True, no: Any = __sentinel) -> Any: """Returns the first element, ``no'' if stream is empty. ``finisher'', if not None, is called on a found element""" try: rc = next (self.iterator) @@ -429,14 +432,14 @@ def first (self, finisher: Optional[Callable[[T], Any]] = None, consume: bool = return self.__checkNo (no, 'first') @overload - def last (self, finisher: None = ..., no: T = ...) -> T: ... + def last (self, finisher: None = ..., no: _T = ...) -> _T: ... @overload def last (self, finisher: None = ..., no: Any = ...) -> Any: ... @overload - def last (self, finisher: Callable[[T], T], no: T = ...) -> T: ... + def last (self, finisher: Callable[[_T], _T], no: _T = ...) -> _T: ... @overload - def last (self, finisher: Callable[[T], O], no: O = ...) -> O: ... - def last (self, finisher: Optional[Callable[[T], Any]] = None, no: Any = __sentinel) -> Any: + def last (self, finisher: Callable[[_T], _O], no: _O = ...) -> _O: ... + def last (self, finisher: Optional[Callable[[_T], Any]] = None, no: Any = __sentinel) -> Any: """Returns the last element, ``no'' if stream is empty. ``finisher'', if not None, is called on a found element""" rc = deque (self.iterator, maxlen = 1) if len (rc): @@ -444,26 +447,26 @@ def last (self, finisher: Optional[Callable[[T], Any]] = None, no: Any = __senti return self.__checkNo (no, 'last') @overload - def most (self, finisher: None = ..., no: T = ...) -> T: ... + def most (self, finisher: None = ..., no: _T = ...) -> _T: ... @overload def most (self, finisher: None = ..., no: Any = ...) -> Any: ... @overload - def most (self, finisher: Callable[[T], T], no: T = ...) -> T: ... + def most (self, finisher: Callable[[_T], _T], no: _T = ...) -> _T: ... 
@overload - def most (self, finisher: Callable[[T], O], no: O = ...) -> O: ... - def most (self, finisher: Optional[Callable[[T], Any]] = None, no: Any = __sentinel) -> Any: + def most (self, finisher: Callable[[_T], _O], no: _O = ...) -> _O: ... + def most (self, finisher: Optional[Callable[[_T], Any]] = None, no: Any = __sentinel) -> Any: """Returns the element with the most often occurance, ``no'' if stream is empty. ``finisher'', if not None, is called on a found element""" return self.__position (finisher, no, lambda c: -1, 'most') @overload - def least (self, finisher: None = ..., no: T = ...) -> T: ... + def least (self, finisher: None = ..., no: _T = ...) -> _T: ... @overload def least (self, finisher: None = ..., no: Any = ...) -> Any: ... @overload - def least (self, finisher: Callable[[T], T], no: T = ...) -> T: ... + def least (self, finisher: Callable[[_T], _T], no: _T = ...) -> _T: ... @overload - def least (self, finisher: Callable[[T], O], no: O = ...) -> O: ... - def least (self, finisher: Optional[Callable[[T], Any]] = None, no: Any = __sentinel) -> Any: + def least (self, finisher: Callable[[_T], _O], no: _O = ...) -> _O: ... + def least (self, finisher: Optional[Callable[[_T], Any]] = None, no: Any = __sentinel) -> Any: """Returns the element with the least often occurance, ``no'' if stream is empty. ``finisher'', if not None, is called on a found element""" return self.__position (finisher, no, lambda c: 0, 'least') @@ -474,9 +477,9 @@ def sum (self, start: Optional[int] = None) -> int: return sum (cast (Iterable, self.iterator)) @overload - def min (self, no: T = ...) -> T: ... + def min (self, no: _T = ...) -> _T: ... @overload - def min (self, no: None = ...) -> Optional[T]: ... + def min (self, no: None = ...) -> Optional[_T]: ... 
def min (self, no: Any = __sentinel) -> Any: """Returns the minimum value of the stream""" try: @@ -485,9 +488,9 @@ def min (self, no: Any = __sentinel) -> Any: return self.__checkNo (no, 'min') @overload - def max (self, no: T = ...) -> T: ... + def max (self, no: _T = ...) -> _T: ... @overload - def max (self, no: None = ...) -> Optional[T]: ... + def max (self, no: None = ...) -> Optional[_T]: ... def max (self, no: Any = __sentinel) -> Any: """Returns the maximum value of the stream""" try: @@ -495,7 +498,7 @@ def max (self, no: Any = __sentinel) -> Any: except ValueError: return self.__checkNo (no, 'max') - def count (self, *args: T) -> int: + def count (self, *args: _T) -> int: """Without arguments returns the number of elements or return all elements which are part of ``args''""" if len (args) == 0: try: @@ -504,14 +507,14 @@ def count (self, *args: T) -> int: return sum ((1 for _ in self.iterator)) return sum ((1 for _v in self.iterator if _v in args)) - def counter (self) -> typing.Counter[T]: + def counter (self) -> typing.Counter[_T]: return Counter (self.iterator) - def any (self, predicate: Callable[[T], bool] = bool) -> bool: + def any (self, predicate: Callable[[_T], bool] = bool) -> bool: """Return True if at least one element matches ``predicate''""" return sum ((1 for _v in self.iterator if predicate (_v))) > 0 - def all (self, predicate: Callable[[T], bool] = bool) -> bool: + def all (self, predicate: Callable[[_T], bool] = bool) -> bool: """Return True if all element match ``predicate''""" counter = [0] def predicate_and_count (v: Any) -> bool: @@ -520,14 +523,14 @@ def predicate_and_count (v: Any) -> bool: matches = sum ((1 for _v in self.iterator if predicate_and_count (_v))) return matches == counter[0] - def each (self, predicate: Callable[[T], Any]) -> None: + def each (self, predicate: Callable[[_T], Any]) -> None: """Calls ``predicate'' on each element of the stream like java forEach()""" deque (filter (predicate, self.iterator), maxlen = 
0) def dispatch (self, - condition: Callable[[T], bool], - modifier: Callable[[T], O], - *args: Union[Callable[[T], bool], Callable[[T], O]], + condition: Callable[[_T], bool], + modifier: Callable[[_T], _O], + *args: Union[Callable[[_T], bool], Callable[[_T], _O]], exclusive: bool = False ) -> None: """``args'' is a list of filter/handler functions @@ -551,26 +554,42 @@ def dispatcher (elem: Any) -> bool: return False deque (filter (dispatcher, self.iterator), maxlen = 0) - def list (self) -> List[T]: + def list (self) -> List[_T]: """Returns the stream as a list like java asList()""" return list (self.iterator) + + def list_to (self, t: Type[_O]) -> List[_O]: + """Like list, but help type checker""" + return cast (List[_O], list (self.iterator)) - def tuple (self) -> Tuple[T, ...]: + def tuple (self) -> Tuple[_T, ...]: """Returns the stream as a tuple""" return tuple (self.iterator) + + def tuple_to (self, t: Type[_O]) -> Tuple[_O, ...]: + """Like tuple, but help type checker""" + return cast (Tuple[_O, ...], tuple (self.iterator)) - def set (self) -> Set[T]: + def set (self) -> Set[_T]: """Returns the stream as set""" return set (self.iterator) + def set_to (self, t: Type[_O]) -> Set[_O]: + """Like set, but help type checker""" + return cast (Set[_O], set (self.iterator)) + def dict (self) -> Dict[Any, Any]: """Returns the stream as a dictionary""" return dict (cast (Iterable, self.iterator)) - def deque (self) -> typing.Deque[T]: + def deque (self) -> Deque[_T]: """Return the stream as collections.deque""" return deque (self.iterator) + def deque_to (self, t: Type[_O]) -> Deque[_O]: + """Like deque, but help type checker""" + return cast (Deque[_O], deque (self.iterator)) + def group (self, predicate: Optional[Callable[[Any], Tuple[Any, Any]]] = None, finisher: Optional[Callable[[Dict[Any, List[Any]]], Any]] = None) -> Any: """Returns a dict of grouped elements as separated by ``predicate'', optional modify the final dict by ``finisher''.""" rc: 
DefaultDict[Any, List[Any]] = defaultdict (list) @@ -601,7 +620,7 @@ def finisher (self, supplier: Any, count: int) -> Any: def collect (self, supplier: Any, - accumulator: Optional[Callable[[Any, T], None]] = None, + accumulator: Optional[Callable[[Any, _T], None]] = None, finisher: Optional[Callable[[Any, int], Any]] = None, progress: Union[None, str, Stream.Progress] = None, checkpoint: Optional[int] = None @@ -629,7 +648,7 @@ class Collector (Stream.Collector): def supplier (self) -> Any: return supplier () if callable (supplier) else supplier - def accumulator (self, supplier: Any, element: T) -> None: + def accumulator (self, supplier: Any, element: _T) -> None: if accumulator is not None: accumulator (supplier, element) @@ -649,12 +668,12 @@ def finisher (self, supplier: Any, count: int) -> Any: return collector.finisher (s, counter) def collect_to (self, - t: Type[O], + t: Type[_O], supplier: Any, - accumulator: Optional[Callable[[Any, T], None]] = None, - finisher: Optional[Callable[[Any, int], O]] = None, + accumulator: Optional[Callable[[Any, _T], None]] = None, + finisher: Optional[Callable[[Any, int], _O]] = None, progress: Union[None, str, Stream.Progress] = None, checkpoint: Optional[int] = None - ) -> O: + ) -> _O: """Like collect, but passing a type as a hint for the return type for type checking""" - return cast (O, self.collect (supplier, accumulator, finisher, progress, checkpoint)) + return cast (_O, self.collect (supplier, accumulator, finisher, progress, checkpoint)) diff --git a/backend/src/script/lib/agn3/systemconfig.py b/backend/src/script/lib/agn3/systemconfig.py index 9b34c05fa..203b55eaf 100644 --- a/backend/src/script/lib/agn3/systemconfig.py +++ b/backend/src/script/lib/agn3/systemconfig.py @@ -10,18 +10,20 @@ 
#################################################################################################################################################################################################################################################################### # from __future__ import annotations -import os, json, platform, pwd, re -from typing import Callable, Final, Optional, TypeVar, Union +import os, json, platform, pwd, re, base64 +from typing import Any, Callable, Final, Optional, TypeVar, Union from typing import Dict, KeysView, List, Set, Tuple from typing import overload +from .exceptions import error from .ignore import Ignore from .stream import Stream +from .template import Placeholder from .tools import atob, listsplit # __all__ = ['Systemconfig'] # -R = TypeVar ('R') -T = TypeVar ('T') +_R = TypeVar ('_R') +_T = TypeVar ('_T') # def _determinate_essentials () -> Tuple[str, str, str, str]: fqdn = platform.node ().lower () @@ -161,13 +163,13 @@ def fqdn (self) -> str: def host (self) -> str: return self._host - def pick (self, collection: Dict[Optional[str], T]) -> T: + def pick (self, collection: Dict[Optional[str], _T]) -> _T: for selection in self.selections: with Ignore (KeyError): return collection[selection] raise KeyError () - def pick_pattern (self, collection: Dict[str, T], key: str) -> T: + def pick_pattern (self, collection: Dict[str, _T], key: str) -> _T: for selection in self.selections: with Ignore (KeyError): return collection[f'{key}[{selection}]' if selection is not None else key] @@ -180,7 +182,7 @@ def __contains__ (self, hostname: Union[None, str, Set[Optional[str]]]) -> bool: _selection = Selection (_user, _fqdn, _host) __sentinel: Final[object] = object () - def __init__ (self, path: Optional[str] = None) -> None: + def __init__ (self) -> None: """path to configuration file or None to use default Setup the Systemconfig object and read the content of the system confg @@ -190,10 +192,9 @@ def __init__ (self, path: Optional[str] = 
None) -> None: self.content = os.environ.get ('SYSTEM_CONFIG') self.cfg: Dict[str, str] = {} if self.content is None: - if path is None: - path = self._default_path - if not os.path.isfile (path) and os.path.isfile (self._default_legacy_path): - path = self._default_legacy_path + path = self._default_path + if not os.path.isfile (path) and os.path.isfile (self._default_legacy_path): + path = self._default_legacy_path self.path = path self._check () else: @@ -201,15 +202,13 @@ def __init__ (self, path: Optional[str] = None) -> None: def _check (self) -> None: if self.path is not None: - try: + with Ignore (OSError, IOError): st = os.stat (self.path) if st.st_mtime != self.last_modified: self.last_modified = st.st_mtime with open (self.path) as fd: self.content = fd.read () self._parse () - except (OSError, IOError): - pass pattern_selective = re.compile ('^([^[]+)\\[([^]]+)\\]$') def _parse (self) -> None: @@ -232,15 +231,13 @@ def _parse (self) -> None: else: cur.append (line) elif line and not line.startswith ('#'): - try: + with Ignore (ValueError): (var, val) = [_v.strip () for _v in line.split ('=', 1)] if val == '{': cont = var cur.clear () else: self.cfg[var] = val - except ValueError: - pass update: Dict[str, str] = {} for (option, value) in self.cfg.items (): mtch = self.pattern_selective.match (option) @@ -340,10 +337,10 @@ def lget (self, var: str, default: Optional[List[str]] = None) -> Optional[List[ return default @overload - def __user (self, var: str, default: None, retriever: Callable[..., R]) -> Optional[R]: ... + def __user (self, var: str, default: None, retriever: Callable[..., _R]) -> Optional[_R]: ... @overload - def __user (self, var: str, default: R, retriever: Callable[..., R]) -> R: ... - def __user (self, var: str, default: Optional[R], retriever: Callable[..., R]) -> Optional[R]: + def __user (self, var: str, default: _R, retriever: Callable[..., _R]) -> _R: ... 
+ def __user (self, var: str, default: Optional[_R], retriever: Callable[..., _R]) -> Optional[_R]: rc = retriever (f'{var}-{Systemconfig._user}', Systemconfig.__sentinel) if Systemconfig._user is not None else Systemconfig.__sentinel return rc if rc is not Systemconfig.__sentinel else retriever (var, default) @@ -392,7 +389,54 @@ def user_has (self, var: str, default: Optional[bool] = None) -> bool: if (rc := self.user_bget (var)) is not None: return rc return self.has (var, default = default) - + + __ph = Placeholder (True) + def as_config (self, + value: str, + *, + namespace: Optional[Dict[str, Any]] = None, + path_namespace: Optional[Dict[str, str]] = None, + path_base: str = '.', + optional: bool = False + ) -> Dict[str, Any]: + rc: Dict[str, Any] = {} + if value.startswith ('@'): + path = value[1:].strip () + if path_namespace: + path = Systemconfig.__ph (path, path_namespace) + path = os.path.expanduser (path) + if not os.path.isabs (path) and path_base: + path = os.path.abspath (os.path.join (path_base, path)) + else: + path = os.path.normpath (path) + try: + with open (path) as fd: + value = '\n'.join ([_l for _l in fd.readlines () if not _l.lstrip ().startswith ('#')]) + except IOError: + if optional: + return rc + raise + with Ignore (): + value = base64.b64decode (value, validate = True).decode ('UTF-8') + try: + config: Any = eval (value, namespace if namespace is not None else {}) + except Exception as e: + for state in 0, 1: + with Ignore (): + if state == 0: + if not namespace: + continue + config = json.loads (Systemconfig.__ph (value, namespace)) + else: + config = json.loads (value) + break + else: + raise error (f'"{value}": not a valid expression: {e}') + if isinstance (config, dict): + for (key, option) in config.items (): + if isinstance (key, str): + rc[key] = option + return rc def dump (self) -> None: """Display current configuration content""" diff --git a/backend/src/script/lib/agn3/template.py b/backend/src/script/lib/agn3/template.py 
index 004566dea..553ff1b96 100644 --- a/backend/src/script/lib/agn3/template.py +++ b/backend/src/script/lib/agn3/template.py @@ -90,7 +90,7 @@ def __init__ (self, fname: Optional[str], lang: Optional[str] = None, fill: Opti lang = line[1:-1] if lang == '*': lang = None - if not lang in self.messages: + if lang not in self.messages: self.messages[lang] = {} cur = self.messages[lang] else: @@ -113,7 +113,7 @@ def __getitem__ (self, token: str) -> str: try: msg = self.messages[self.lang][token] except KeyError: - if not self.lang is None: + if self.lang is not None: try: msg = self.messages[None][token] except KeyError: @@ -315,10 +315,10 @@ def __compileContent (self) -> None: pos = 0 clen = len (self.content) mtch = self.codeStart.search (self.content) - if not mtch is None: + if mtch is not None: start = mtch.end () mtch = self.codeEnd.search (self.content, start) - if not mtch is None: + if mtch is not None: (end, pos) = mtch.span () self.code += self.content[start:end] + '\n' else: @@ -339,9 +339,9 @@ def __compileContent (self) -> None: if start > pos: self.__compileString (self.content[pos:start]) pos = end - if not mtch is None: + if mtch is not None: tstart = start - if not groups[2] is None: + if groups[2] is not None: token = groups[2] arg = '' if token != '#': @@ -358,7 +358,7 @@ def __compileContent (self) -> None: escape = False elif ch == '\\': escape = True - elif not quote is None: + elif quote is not None: if ch == quote: quote = None elif ch in '\'"': @@ -423,7 +423,7 @@ def __compileContent (self) -> None: self.__compileError (tstart, 'Too many closing blocks') elif token in ('stop', ): pos = clen - elif not groups[3] is None: + elif groups[3] is not None: expr = groups[3] if expr == '$': self.__compileString ('$') @@ -431,13 +431,13 @@ def __compileContent (self) -> None: if len (expr) >= 2 and expr[0] == '{' and expr[-1] == '}': expr = expr[1:-1] self.__compileExpr (expr) - elif not groups[5] is None: + elif groups[5] is not None: expr = 
groups[5] if expr[0] == '[': self.__compileExpr ('_[\'{escape}\']'.format (escape =self.__escaper (expr[1:-1]))) elif expr[0] == '{': self.__compileExpr ('_ (\'{escape}\')'.format (escape = self.__escaper (expr[1:-1]))) - elif not groups[0] is None: + elif groups[0] is not None: self.__compileString (groups[0]) if self.indent > 0: self.__compileError (0, f'Missing {self.indent} closing #end statement(s)') @@ -483,7 +483,7 @@ def fill (self, namespace: Optional[Dict[str, Any]], lang: Optional[str] = None, self.namespace = {} else: self.namespace = namespace.copy () - if not lang is None: + if lang is not None: self.namespace['lang'] = lang self.namespace['property'] = self.properties if mc is None: @@ -498,7 +498,7 @@ def fill (self, namespace: Optional[Dict[str, Any]], lang: Optional[str] = None, except Exception as e: raise error (f'Execution failed: {e}') result = ''.join (self.namespace['__result']) - if not lang is None: + if lang is not None: nresult = [] for line in result.split ('\n'): mtch = self.langID.search (line) diff --git a/backend/src/script/lib/agn3/tools.py b/backend/src/script/lib/agn3/tools.py index 267fde30f..e5e4dd97b 100644 --- a/backend/src/script/lib/agn3/tools.py +++ b/backend/src/script/lib/agn3/tools.py @@ -12,8 +12,9 @@ # from __future__ import annotations import os, re, subprocess, errno, logging +import traceback from enum import Enum -from typing import Any, Callable, Optional +from typing import Any, Callable, NoReturn, Optional, Union from typing import Dict, Generator, List, Set from .exceptions import error from .stream import Stream @@ -22,7 +23,8 @@ logger = logging.getLogger (__name__) # __all__ = [ - 'atoi', 'atob', 'btoa', 'calc_hash', 'sizefmt', + 'abstract', + 'atoi', 'atof', 'atob', 'btoa', 'calc_hash', 'sizefmt', 'call', 'silent_call', 'match', 'listsplit', 'listjoin', 'Escape', 'escape', 'unescape', @@ -30,7 +32,16 @@ 'Progress' ] # -def atoi (s: Any, base: int = 10, default: int = 0) -> int: +def abstract () -> 
NoReturn: + try: + invoker = traceback.extract_stack ()[-3] + msg = f'abstract method {invoker.filename}:{invoker.lineno}:{invoker.name}.{invoker.line} not implemented' + except: + msg = 'abstract method not implemented (no clue where)' + logger.error (msg) + raise NotImplementedError (msg) +# +def atoi (s: Any, base: int = 10, default: Union[int, float, str] = 0) -> int: """Lazy parses a value into an integer parses input parameter as numeric value, use default if it is not @@ -59,9 +70,42 @@ def atoi (s: Any, base: int = 10, default: int = 0) -> int: return int (s) try: return int (s, base) - except (ValueError, TypeError): - return default - + except: + return int (default) + +def atof (s: Any, default: Union[int, float, str] = 0.0) -> float: + """Lazy parses a value into a float + +>>> atof (0.0) +0.0 +>>> atof (0) +0.0 +>>> atof ('0') +0.0 +>>> atof (1.0) +1.0 +>>> atof (1) +1.0 +>>> atof ('1') +1.0 +>>> atof ('x') +0.0 +>>> atof ('x', 2.0) +2.0 +>>> atof ('x', 2) +2.0 +>>> atof ('x', '2') +2.0 +>>> atof ('x', 'y') +Traceback (most recent call last): + ... +ValueError: could not convert string to float: 'y' +""" + try: + return float (s) + except: + return float (default) + def atob (s: Any) -> bool: """Interprets a value as a boolean @@ -480,24 +524,27 @@ class Plugin: Beware: you cannot overwrite existing names in the namespace! -You can subclass this class and implement the method "catchall" which must -return a function which takes a variable number of arguments. "catchall" -itself is only called with the name of the method which is not implemented -itself. +You can either pass a catchall function during instanciation or +subclass this class and implement the method "catchall" which must +return a function which takes a variable number of arguments. +"catchall" itself is only called with the name of the method which is +not implemented itself. 
""" __slots__ = ['_ns', '_st', '_ca'] - def __init__ (self, code: str, name: Optional[str] = None, ns: Optional[Dict[str, Any]] = None) -> None: + def __init__ (self, code: str, name: Optional[str] = None, ns: Optional[Dict[str, Any]] = None, catchall: Optional[Callable[[str], Callable[..., Any]]] = None) -> None: """Create a plugin using ``code'' for ``name'' using namespace ``ns''""" self._ns = {} if ns is None else ns.copy () self._st: List[Set[str]] = [] - if 'catchall' in self.__class__.__dict__ and callable (self.__class__.__dict__['catchall']): + if catchall is not None: + self._ca: Callable[[str], Callable[..., Any]] = catchall + elif 'catchall' in self.__class__.__dict__ and callable (self.__class__.__dict__['catchall']): self._ca = self.catchall else: - def catchall (name: str) -> Callable[..., Any]: + def catchall_dummy (name: str) -> Callable[..., Any]: def dummy (*args: Any, **kwargs: Any) -> Any: return None return dummy - self._ca = catchall + self._ca = catchall_dummy compiled = compile (code, name if name is not None else '*unset*', 'exec') exec (compiled, self._ns) @@ -541,7 +588,7 @@ class Progress (Stream.Progress): For longer running processes or loops, this can be used to visualize the progress of the process. For example when processing a CSV file: -rd = CSVReader ('some/file/name', CSVDefault) +rd = CSVReader ('some/file/name', dialect = CSVDefault) p = dagent.Progress ('csv reader') for row in rd: # process the row @@ -604,5 +651,4 @@ def log (self, s: str) -> None: def handle (self) -> None: """Hook for more action, e.g. 
a database commit, when show() is invoked""" - pass diff --git a/backend/src/script/lib/agn3/uid.py b/backend/src/script/lib/agn3/uid.py index 27acfa457..476f08fae 100644 --- a/backend/src/script/lib/agn3/uid.py +++ b/backend/src/script/lib/agn3/uid.py @@ -107,10 +107,16 @@ def parse (self, content: bytes) -> None: @staticmethod def encode (content: bytes) -> str: - return base64.urlsafe_b64encode (content).rstrip (b'=').decode ('us-ascii') + try: + return base64.urlsafe_b64encode (content).rstrip (b'=').decode ('us-ascii') + except Exception as e: + raise error (f'failed to encode {content!r}: {e}') @staticmethod def decode (content: str) -> bytes: - return base64.urlsafe_b64decode (content + '=' * (len (content) % 4)) + try: + return base64.urlsafe_b64decode (content + '=' * (len (content) % 4)) + except Exception as e: + raise error (f'failed to decode {content}: {e}') class UIDCache: __slots__ = ['instances'] @@ -200,7 +206,7 @@ def __init__ (self, handle_only_own_instance: bool) -> None: try: if db.open (): licence_id = int (EMMConfig (db = db, class_names = ['system']).get ('system', 'licence')) - if licence_id != 0: + if licence_id <= 0: raise error (f'invalid licence_id {licence_id} found') self.instances[licence_id] = UIDCache.Instance (licence_id, db) seen.add (dbid) @@ -216,7 +222,7 @@ def done (self) -> None: instance.close () def find (self, uid: UID) -> Tuple[UIDCache.Company, UIDCache.Mailing]: - if uid.licence_id != 0: + if uid.licence_id <= 0: raise error (f'invalid licence_id {uid.licence_id}') try: instance = self.instances[uid.licence_id] diff --git a/backend/src/script/lib/agn3/worker.py b/backend/src/script/lib/agn3/worker.py index 734e99700..74e6bf3aa 100644 --- a/backend/src/script/lib/agn3/worker.py +++ b/backend/src/script/lib/agn3/worker.py @@ -73,13 +73,10 @@ def controller_setup (self) -> Any: return None def controller_teardown (self, ctx: Any) -> None: """cleanup used resources""" - pass def controller_register (self, ctx: Any, serv: 
XMLRPC) -> None: """register methods to XML-RPC server ``serv''""" - pass def controller_step (self, ctx: Any) -> None: """called periodically""" - pass def __controller (self) -> None: logger.debug ('Controller starting') @@ -104,10 +101,8 @@ def executor_setup (self) -> Any: return None def executor_teardown (self, ctx: Any) -> None: """cleanup used resources""" - pass def executor_step (self, ctx: Any) -> None: """called periodically""" - pass def executor_request_preparse (self, ctx: Any, rq: Any) -> Any: """preparses request ``rq'' after fetching from queue""" return rq @@ -116,7 +111,6 @@ def executor_request_next (self, ctx: Any, rq: Any) -> Any: return rq def executor_request_handle (self, ctx: Any, rq: Any) -> None: """process request(s) ``rq''""" - pass def __executor (self) -> None: logger.debug ('Executor starting') diff --git a/backend/src/script/lib/agn3/xml.py b/backend/src/script/lib/agn3/xml.py index 22d607caf..02020e6bd 100644 --- a/backend/src/script/lib/agn3/xml.py +++ b/backend/src/script/lib/agn3/xml.py @@ -12,12 +12,12 @@ from __future__ import annotations import re, logging import xml.sax -from xml.sax import SAXParseException from xml.sax.handler import ContentHandler, ErrorHandler from types import TracebackType -from typing import Any, Callable, Optional, Union +from typing import Any, Callable, NoReturn, Optional, Union from typing import Dict, IO, List, Type from .exceptions import error +from .ignore import Ignore from .io import copen # __all__ = ['XMLWriter', 'XMLReader'] @@ -128,7 +128,7 @@ def __new_node (self, name: str, attrs: Optional[Dict[str, str]], simple: bool, for (var, val) in attrs.items (): out += ' {var}="{val}"'.format (var = var, val = self.__convert (val)) if simple: - if not text is None: + if text is not None: out += '>{text}\n'.format (text = self.__convert (text, cdata), name = name) else: out += '/>\n' @@ -159,7 +159,7 @@ def close (self, name: Optional[str] = None) -> None: name = self.backlog.pop () 
self.__end_node (name) else: - if not name in self.backlog: + if name not in self.backlog: raise error (f'{name} not found in backlog') while self.backlog: pname = self.backlog.pop () @@ -214,10 +214,8 @@ def end (self) -> None: if self.state == 1: while self.backlog: self.close () - try: + with Ignore (AttributeError): self.output.flush () - except AttributeError: - pass self.state = 2 class XMLReader (ContentHandler, ErrorHandler): @@ -437,15 +435,17 @@ def process_string (self, content: str) -> int: # # Error handler - def error (self, exc: SAXParseException) -> None: + def error (self, exc: BaseException) -> NoReturn: """writes an error""" self.__set_status (self.ERROR) logger.exception (exc) - def fatalError (self, exc: SAXParseException) -> None: + raise exc + def fatalError (self, exc: BaseException) -> NoReturn: """writes a fatal error""" self.__set_status (self.FATAL) logger.exception (exc) - def warning (self, exc: SAXParseException) -> None: + raise exc + def warning (self, exc: BaseException) -> None: """writes a warning""" self.__set_status (self.WARNING) logger.warning (exc) diff --git a/backend/src/script/lib/config.sh b/backend/src/script/lib/config.sh index eb0738125..19e3f74aa 100644 --- a/backend/src/script/lib/config.sh +++ b/backend/src/script/lib/config.sh @@ -23,24 +23,31 @@ fi export BASE # export SYSTEM_CONFIG='{ - "trigger-port": 8450, - "direct-path": true, - "direct-path-incoming": "/home/openemm/var/spool/DIRECT", - "direct-path-archive": "/home/openemm/var/spool/ARCHIVE", - "direct-path-recover": "/home/openemm/var/spool/RECOVER", - "direct-path-queues": "/home/openemm/var/spool/QUEUE", - "licence": 0, - "dbid": "openemm", - "merger-address": "127.0.0.1", - "filter-name": "localhost", - "mailout-server": "localhost", - "mailout-port": 8093, - "direct-path-server": "localhost", - "direct-path-port": 9403 + "trigger-port": 8450, + "direct-path": true, + "direct-path-incoming": "/home/openemm/var/spool/DIRECT", + "direct-path-archive": 
"/home/openemm/var/spool/ARCHIVE", + "direct-path-recover": "/home/openemm/var/spool/RECOVER", + "direct-path-queues": "/home/openemm/var/spool/QUEUE", + "licence": 0, + "dbid": "openemm", + "merger-address": "127.0.0.1", + "filter-name": "localhost", + "mailout-server": "localhost", + "mailout-port": 8093, + "direct-path-server": "localhost", + "direct-path-port": 9403 }' export DBCFG_PATH="$BASE/etc/dbcfg" # -version="`cut '-d;' -f1 ~/scripts/build.spec`" +# +if [ -f "$BASE/scripts/build.spec" ]; then + version="`cut '-d;' -f1 $BASE/scripts/build.spec`" +elif [ -x "$BASE/bin/xmlback" ]; then + version="`$BASE/bin/xmlback -V | awk '{ print $3 }'`" +else + version="current" +fi licence="`$BASE/bin/config-query licence`" system="`uname -s`" host="`uname -n | cut -d. -f1`" @@ -279,7 +286,7 @@ setupVirtualEnviron() { 3*) ;; *) - die "virtual enviroment not support for deprectaed python versions" + die "virtual environment not support for deprectaed python versions" ;; esac venv="$BASE/.venv.$pyversion" diff --git a/backend/src/script/process/bav-update3.py b/backend/src/script/process/bav-update3.py index 52891ab59..0fce7c69e 100644 --- a/backend/src/script/process/bav-update3.py +++ b/backend/src/script/process/bav-update3.py @@ -79,7 +79,7 @@ def executor (self) -> bool: def file_reader (self, fname: str) -> List[str]: with open (fname, errors = 'backslashreplace') as fd: - return [line.rstrip ('\r\n') for line in fd if not line[0] in '\n#'] + return [line.rstrip ('\r\n') for line in fd if line[0] not in '\n#'] def valid_domain (self, domain: Optional[str]) -> bool: return domain is not None and self.valid_domain_pattern.match (domain) is not None diff --git a/backend/src/script/process/bavd3.py b/backend/src/script/process/bavd3.py index d49dbd99a..c4fbb73e5 100644 --- a/backend/src/script/process/bavd3.py +++ b/backend/src/script/process/bavd3.py @@ -20,6 +20,7 @@ from datetime import datetime from email.message import EmailMessage from email.utils import 
parseaddr +from urllib.parse import quote from typing import Any, Final, Optional from typing import ClassVar, DefaultDict, Dict, List, NamedTuple, Pattern, Tuple from typing import cast @@ -28,8 +29,8 @@ from agn3.email import EMail, ParseEMail from agn3.emm.bounce import Bounce from agn3.emm.datasource import Datasource -from agn3.emm.types import UserStatus -from agn3.exceptions import error +from agn3.emm.types import MediaType, UserStatus +from agn3.exceptions import error, Stop from agn3.ignore import Ignore from agn3.io import which from agn3.log import log_limit, log @@ -67,6 +68,50 @@ def invoke (url: str, retries: int = 3, **kws: Any) -> Tuple[bool, requests.Resp raise raise error (f'{url} invocation failed after {retries} retries') +class Report: + __slots__ = ['logpath', 'content'] + def __init__ (self, logpath: Optional[str]) -> None: + self.logpath = logpath + self.content: Dict[str, str] = {} + self['rid'] = 'unknown' + self['timestamp'] = datetime.now () + self['action'] = 'unprocessed' + + def __setitem__ (self, option: str, value: Any) -> None: + if value is None: + use = '' + elif isinstance (value, str): + use = value + elif isinstance (value, bool): + use = 'true' if value else 'false' + elif isinstance (value, datetime): + use = f'{value:%Y-%m-%d %H:%M:%S}' + else: + use = str (value) + self.content[option] = use + + def write (self, ok: bool) -> None: + report = '{licence};{rid};{timestamp};{action};{status};{info}'.format ( + licence = atoi (self.content.get ('licence', licence)), + rid = self.content['rid'], + timestamp = self.content['timestamp'], + action = self.content['action'], + status = '+' if ok else '-', + info = (Stream (self.content.items ()) + .filter (lambda kv: kv[0] not in ('rid', 'timestamp', 'action')) + .map (lambda kv: '{k}={v}'.format (k = kv[0], v = quote (kv[1]))) + .join ('\t') + ) + ) + if self.logpath is not None: + try: + with open (self.logpath, 'a') as fd: + fd.write (f'{report}\n') + except IOError as e: + 
logger.error (f'report: failed to write "{report}" to "{self.logpath}": {e}') + else: + print (f'Would report {report}') + class Autoresponder: __slots__ = ['aid', 'sender'] name_autoresponder: Final[str] = 'autoresponder' @@ -148,6 +193,7 @@ def allow (self, db: DB, bounce: Bounce, parameter: Line, cinfo: ParseEMail.Orig def trigger_message (self, db: DB, + report: Report, bounce: Bounce, cinfo: Optional[ParseEMail.Origin], parameter: Line, @@ -160,9 +206,12 @@ def trigger_message (self, raise error ('no company id set') mailing_id = parameter.autoresponder_mailing_id company_id = parameter.company_id + report['autoresponder-mailing-id'] = mailing_id if cinfo is None: + report['autoresponder-failure'] = 'unable to detect origin' raise error ('failed to determinate origin of mail') if not cinfo.valid: + report['autoresponder-failure'] = 'invalid origin detected' raise error ('uid from foreign instance %s (expected from %s)' % (cinfo.licence_id, licence)) customer_id = cinfo.customer_id rdir_domain = None @@ -175,28 +224,44 @@ def trigger_message (self, {'mailing_id': mailing_id} ) if rq is None: + report['autoresponder-failure'] = f'mailing with mailing-id {mailing_id} not found' raise error ('mailing %d not found' % mailing_id) if rq.company_id != company_id: + report['autoresponder-failure'] = f'mailing with mailing-id {mailing_id} does not belong to company_id {company_id} but {rq.company_id}' raise error ('mailing %d belongs to company %d, but mailloop belongs to company %d' % (mailing_id, rq[1], company_id)) if rq.deleted: - logger.info ('mailing %d is marked as deleted' % mailing_id) + report['autoresponder-failure'] = f'mailing with mailing-id {mailing_id} is marked as deleted' + raise error ('mailing %d is marked as deleted' % mailing_id) mailinglist_id = rq.mailinglist_id + mailinglist_ids = {mailinglist_id} + if cinfo.mailing_id > 0: + rq = db.querys ( + 'SELECT mailinglist_id ' + 'FROM mailing_tbl ' + 'WHERE mailing_id = :mailing_id', + { + 'mailing_id': 
cinfo.mailing_id + } + ) + if rq is not None and rq.mailinglist_id: + mailinglist_ids.add (rq.mailinglist_id) # - rq = db.querys ( - 'SELECT user_type ' - 'FROM customer_%d_binding_tbl ' - 'WHERE customer_id = :customer_id AND mailinglist_id = :mailinglist_id AND mediatype = 0' - % company_id, + for row in db.queryc ( + 'SELECT mailinglist_id, user_type ' + f'FROM customer_{company_id}_binding_tbl ' + 'WHERE customer_id = :customer_id AND mediatype = :mediatype', { 'customer_id': customer_id, - 'mailinglist_id': mailinglist_id + 'mediatype': MediaType.EMAIL.value } - ) - if rq is None or rq.user_type not in ('A', 'T', 't'): + ): + if row.user_type in ('A', 'T', 't') and row.mailinglist_id in mailinglist_ids: + logger.info ('recipient %d on %d for %d is admin/test recipient and not blocked' % (customer_id, row.mailinglist_id, mailing_id)) + break + else: if not self.allow (db, bounce, parameter, cinfo, dryrun): + report['autoresponder-failure'] = 'recipient blocked due to previous sent autoresponder' raise error ('recipient is not allowed to received (again) an autoresponder') - else: - logger.info ('recipient %d on %d for %d is admin/test recipient and not blocked' % (customer_id, mailinglist_id, mailing_id)) # rq = db.querys ( 'SELECT rdir_domain ' @@ -227,7 +292,7 @@ def trigger_message (self, if rq is None: raise error ('no entry in mailloop_tbl for %s found' % self.aid) security_token = rq.security_token if rq.security_token else '' - url = f'{rdir_domain}/sendMailloopAutoresponder.do' + url = f'{rdir_domain}/sendMailloopAutoresponder.action' params: Dict[str, str] = { 'mailloopID': self.aid, 'companyID': str (company_id), @@ -240,12 +305,12 @@ def trigger_message (self, try: (success, response) = invoke (url, params = params) if success: + report['autoresponder-success'] = f'sent to {customer_id}' logger.info ('Autoresponder mailing %d for customer %d triggered: %s' % (mailing_id, customer_id, response)) else: logger.warning (f'Autoresponder mailing 
{mailing_id} for customer {customer_id} failed due to: {response} with {response.text}') except Exception as e: logger.error (f'Failed to trigger {url}: {e}') - except error as e: logger.info ('Failed to send autoresponder: %s' % str (e)) except (KeyError, ValueError, TypeError) as e: @@ -484,7 +549,7 @@ def __init__ (self) -> None: super ().__init__ ( Field ('sender', self.__parse_address, optional = True, source = 'from'), Field ('to', self.__parse_address, optional = True), - Field ('rid', atoi, optional = True, default = lambda: 0), + Field ('rid', atoi, optional = True, default = lambda n: 0), Field ('rid_name', optional = True, source = 'rid'), Field ('company_id', int, optional = True, source = 'cid'), Field ('forward', self.__listsplit, optional = True, source = 'fwd'), @@ -551,7 +616,8 @@ def parse (self, parameter: str) -> Line: class BAV: __slots__ = [ 'bounce', - 'raw', 'msg', 'dryrun', 'uid_handler', 'parsed_email', 'cinfo', 'parameter', + 'raw', 'msg', 'dryrun', 'report', + 'uid_handler', 'parsed_email', 'cinfo', 'parameter', 'header_from', 'rid', 'sender', 'rule', 'reason' ] x_agn = 'X-AGNMailloop' @@ -559,6 +625,7 @@ class BAV: has_spamassassin = which ('spamassassin') is not None save_pattern = os.path.join (base, 'var', 'spool', 'filter', '%s-%s') ext_bouncelog = os.path.join (base, 'log', 'extbounce.log') + reportlog = os.path.join (base, 'log', 'mailloop.log') class From (NamedTuple): realname: str address: str @@ -568,15 +635,22 @@ def __init__ (self, bavconfig: BAVConfig, bounce: Bounce, raw: str, msg: EmailMe self.raw = raw self.msg = msg self.dryrun = dryrun + self.report = Report (BAV.reportlog if not dryrun else None) self.uid_handler = UIDHandler (enable_cache = True) self.parsed_email = ParseEMail (raw, uid_handler = self.uid_handler) self.cinfo = self.parsed_email.get_origin () + if self.cinfo is not None and self.cinfo.valid: + self.report['customer_id'] = self.cinfo.customer_id + self.report['mailing_id'] = self.cinfo.mailing_id + 
self.report['company_id'] = self.cinfo.company_id + self.report['licence_id'] = self.cinfo.licence_id if self.cinfo.licence_id is not None else licence return_path = None return_path_header = self.msg['return-path'] if return_path_header is not None: match = BAVConfig.address_pattern.search (return_path_header) if match is not None: return_path = match.group (1) + self.report['return-path'] = return_path try: parameter: Optional[str] = self.msg[BAV.x_agn] except KeyError: @@ -587,6 +661,7 @@ def __init__ (self, bavconfig: BAVConfig, bounce: Bounce, raw: str, msg: EmailMe address = '{local_part}@{domain_part}'.format (local_part = local_part, domain_part = domain_part.lower ()) parameter = bavconfig[address][0] if parameter is not None: + self.report['parameter'] = parameter self.parameter = bavconfig.parse (parameter) if ( self.cinfo is not None and @@ -595,7 +670,7 @@ def __init__ (self, bavconfig: BAVConfig, bounce: Bounce, raw: str, msg: EmailMe self.cinfo.company_id != self.parameter.company_id and self.parameter.to ): - with Ignore (StopIteration): + with Ignore (Stop): for address in ( self.parameter.to, '@{domain}'.format (domain = self.parameter.to.split ('@', 1)[-1]) @@ -605,21 +680,25 @@ def __init__ (self, bavconfig: BAVConfig, bounce: Bounce, raw: str, msg: EmailMe new_parameter = bavconfig.parse (parameter) if new_parameter.company_id == self.cinfo.company_id: self.parameter = new_parameter._replace (sender = self.parameter.sender, to = self.parameter.to) - raise StopIteration () + raise Stop () else: self.parameter = bavconfig.parse ('') try: self.header_from: Optional[BAV.From] = BAV.From (*parseaddr (cast (str, self.msg['from']))) if 'from' in self.msg else None + if self.header_from is not None: + self.report['from'] = self.header_from.address except: self.header_from = None self.rid = self.parameter.rid + self.report['rid'] = self.rid if return_path is not None: self.sender = return_path elif self.parameter.sender: self.sender = 
self.parameter.sender else: self.sender = 'postmaster' + self.report['sender'] = self.sender if not msg.get_unixfrom (): msg.set_unixfrom (time.strftime ('From ' + self.sender + ' %c')) self.rule = Rule (bounce.get_rule (0 if self.cinfo is None else self.cinfo.company_id, self.rid)) @@ -636,6 +715,7 @@ def save_message (self, action: str) -> None: logger.debug (f'Saved mesage to {fname}') except IOError as e: logger.error ('Unable to save mail copy to %s %s' % (fname, e)) + self.report['saved'] = fname def sendmail (self, msg: EmailMessage, to: List[str]) -> None: if self.dryrun: @@ -654,6 +734,7 @@ def sendmail (self, msg: EmailMessage, to: List[str]) -> None: logger.debug (f'Send message to {to}') except Exception as e: logger.exception ('Sending mail to %s failed %s' % (to, e)) + self.report['sent'] = ','.join (to) __find_score = re.compile ('score=([0-9]+(\\.[0-9]+)?)') def filter_with_spam_assassin (self, fwd: Optional[List[str]]) -> Optional[List[str]]: @@ -669,14 +750,18 @@ def filter_with_spam_assassin (self, fwd: Optional[List[str]]) -> Optional[List[ self.msg = nmsg spam_status = nmsg['x-spam-status'] if spam_status is not None: + self.report['spam-status'] = spam_status try: m = self.__find_score.search (cast (str, spam_status)) if m is not None: spam_score = float (m.group (1)) + self.report['spam-score'] = spam_score if self.parameter.spam_required is not None and self.parameter.spam_required < spam_score: + self.report['spam-consequence'] = 'not forwarded' fwd = None elif self.parameter.spam_forward is not None and self.parameter.spam_forward < spam_score: fwd = self.parameter.spam_email + self.report['spam-consequence'] = 'forwarded to alternatives' except ValueError as e: logger.warning ('Failed to parse spam score/spam parameter: %s' % str (e)) else: @@ -685,246 +770,256 @@ def filter_with_spam_assassin (self, fwd: Optional[List[str]]) -> Optional[List[ logger.warning ('Failed to parse filtered mail') else: logger.warning ('Failed to retrieve 
filtered mail') + if fwd: + self.report['spam_forward'] = ','.join (fwd) return fwd def unsubscribe (self, db: DB, customer_id: int, mailing_id: int, user_remark: str) -> None: if self.dryrun: print (f'Would unsubscribe {customer_id} due to mailing {mailing_id} with {user_remark}') - return - # - if db.isopen (): - rq = db.querys ('SELECT mailinglist_id, company_id FROM mailing_tbl WHERE mailing_id = :mailing_id', {'mailing_id': mailing_id}) - if rq is not None: - mailinglist_id = rq.mailinglist_id - company_id = rq.company_id - cnt = db.update ( - 'UPDATE customer_%d_binding_tbl ' - 'SET user_status = :userStatus, user_remark = :user_remark, timestamp = CURRENT_TIMESTAMP, exit_mailing_id = :mailing_id ' - 'WHERE customer_id = :customer_id AND mailinglist_id = :mailinglist_id' - % company_id, - { - 'userStatus': UserStatus.OPTOUT.value, - 'user_remark': user_remark, - 'mailing_id': mailing_id, - 'customer_id': customer_id, - 'mailinglist_id': mailinglist_id - } - ) - if cnt > 0: - logger.info ('Unsubscribed customer %d for company %d on mailinglist %d due to mailing %d using %s' % (customer_id, company_id, mailinglist_id, mailing_id, user_remark)) + else: + if db.isopen (): + rq = db.querys ('SELECT mailinglist_id, company_id FROM mailing_tbl WHERE mailing_id = :mailing_id', {'mailing_id': mailing_id}) + if rq is not None: + mailinglist_id = rq.mailinglist_id + company_id = rq.company_id + cnt = db.update ( + 'UPDATE customer_%d_binding_tbl ' + 'SET user_status = :userStatus, user_remark = :user_remark, timestamp = CURRENT_TIMESTAMP, exit_mailing_id = :mailing_id ' + 'WHERE customer_id = :customer_id AND mailinglist_id = :mailinglist_id' + % company_id, + { + 'userStatus': UserStatus.OPTOUT.value, + 'user_remark': user_remark, + 'mailing_id': mailing_id, + 'customer_id': customer_id, + 'mailinglist_id': mailinglist_id + } + ) + if cnt > 0: + logger.info ('Unsubscribed customer %d for company %d on mailinglist %d due to mailing %d using %s' % (customer_id, 
company_id, mailinglist_id, mailing_id, user_remark)) + else: + logger.warning ('Failed to unsubscribe customer %d for company %d on mailinglist %d due to mailing %d, matching %d rows (expected one row)' % (customer_id, company_id, mailinglist_id, mailing_id, cnt)) + db.sync () else: - logger.warning ('Failed to unsubscribe customer %d for company %d on mailinglist %d due to mailing %d, matching %d rows (expected one row)' % (customer_id, company_id, mailinglist_id, mailing_id, cnt)) - db.sync () - else: - logger.debug (f'No mailing for {mailing_id} found') + logger.debug (f'No mailing for {mailing_id} found') + self.report['unsubscribe'] = user_remark def subscribe (self, db: DB, address: str, fullname: str, company_id: int, mailinglist_id: int, formular_id: int) -> None: if self.dryrun: print ('Would try to subscribe "%s" (%r) on %d/%d sending DOI using %r' % (address, fullname, company_id, mailinglist_id, formular_id)) - return - # - if db.isopen (): - logger.info ('Try to subscribe %s (%s) for %d to %d using %d' % (address, fullname, company_id, mailinglist_id, formular_id)) - customer_id: Optional[int] = None - new_binding = True - send_mail = True - user_remark = 'Subscribe via mailloop #%s' % self.rid - custids = (db.stream ('SELECT customer_id FROM customer_%d_tbl WHERE email = :email' % company_id, {'email': address }) - .map_to (int, lambda r: r.customer_id) - .list () - ) - if custids: - logger.info ('Found these customer_ids %s for the email %s' % (custids, address)) - query = 'SELECT customer_id, user_status FROM customer_%d_binding_tbl WHERE customer_id ' % company_id - if len (custids) > 1: - query += 'IN (' - sep = '' - for custid in custids: - query += '%s%d' % (sep, custid) - sep = ', ' - query += ')' - else: - query += '= %d' % custids[0] - query += ' AND mailinglist_id = %d AND mediatype = 0' % mailinglist_id - use: Optional[Row] = None - for rec in db.query (query): - logger.info (f'Found binding [cid, status] {rec}') - if rec.user_status == 
UserStatus.ACTIVE.value: - if use is None or use.user_status != UserStatus.ACTIVE.value or rec.user_status > use.user_status: + else: + if db.isopen (): + logger.info ('Try to subscribe %s (%s) for %d to %d using %d' % (address, fullname, company_id, mailinglist_id, formular_id)) + customer_id: Optional[int] = None + new_binding = True + send_mail = True + user_remark = 'Subscribe via mailloop #%s' % self.rid + custids = (db.stream ('SELECT customer_id FROM customer_%d_tbl WHERE email = :email' % company_id, {'email': address }) + .map_to (int, lambda r: r.customer_id) + .list () + ) + if custids: + logger.info ('Found these customer_ids %s for the email %s' % (custids, address)) + query = 'SELECT customer_id, user_status FROM customer_%d_binding_tbl WHERE customer_id ' % company_id + if len (custids) > 1: + query += 'IN (' + sep = '' + for custid in custids: + query += '%s%d' % (sep, custid) + sep = ', ' + query += ')' + else: + query += '= %d' % custids[0] + query += ' AND mailinglist_id = %d AND mediatype = 0' % mailinglist_id + use: Optional[Row] = None + for rec in db.query (query): + logger.info (f'Found binding [cid, status] {rec}') + if rec.user_status == UserStatus.ACTIVE.value: + if use is None or use.user_status != UserStatus.ACTIVE.value or rec.user_status > use.user_status: + use = rec + elif use is None or (use.user_status != UserStatus.ACTIVE.value and rec.customer_id > use.customer_id): use = rec - elif use is None or (use.user_status != UserStatus.ACTIVE.value and rec.customer_id > use.customer_id): - use = rec - if use is not None: - logger.info ('Use customer_id %d with user_status %d' % (use.customer_id, use.user_status)) - customer_id = use.customer_id - new_binding = False - if use.user_status in (UserStatus.ACTIVE.value, UserStatus.WAITCONFIRM.value): - logger.info ('User status is %d, stop processing here' % use.user_status) - send_mail = False + if use is not None: + logger.info ('Use customer_id %d with user_status %d' % (use.customer_id, 
use.user_status)) + customer_id = use.customer_id + new_binding = False + if use.user_status in (UserStatus.ACTIVE.value, UserStatus.WAITCONFIRM.value): + logger.info ('User status is %d, stop processing here' % use.user_status) + send_mail = False + else: + logger.info ('Set user status to 5') + db.update ( + 'UPDATE customer_%d_binding_tbl ' + 'SET timestamp = CURRENT_TIMESTAMP, user_status = :user_status, user_remark = :user_remark ' + 'WHERE customer_id = :customer_id AND mailinglist_id = :mailinglist_id AND mediatype = 0' + % company_id, + { + 'user_status': UserStatus.WAITCONFIRM.value, + 'user_remark': user_remark, + 'customer_id': customer_id, + 'mailinglist_id': mailinglist_id + }, + commit = True + ) else: - logger.info ('Set user status to 5') + customer_id = max (custids) + logger.info ('No matching binding found, use cutomer_id %s' % customer_id) + else: + datasource_description = 'Mailloop #%s' % self.rid + dsid = Datasource () + datasource_id = dsid.get_id (datasource_description, company_id, 4) + if db.dbms in ('mysql', 'mariadb'): db.update ( - 'UPDATE customer_%d_binding_tbl ' - 'SET timestamp = CURRENT_TIMESTAMP, user_status = :user_status, user_remark = :user_remark ' - 'WHERE customer_id = :customer_id AND mailinglist_id = :mailinglist_id AND mediatype = 0' + 'INSERT INTO customer_%d_tbl (email, gender, mailtype, timestamp, creation_date, datasource_id) ' + 'VALUES (:email, 2, 1, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, :datasource_id)' + % company_id, + { + 'email': address, + 'datasource_id': datasource_id + }, + commit = True + ) + for rec in db.query ('SELECT customer_id FROM customer_%d_tbl WHERE email = :email' % company_id, {'email': address}): + customer_id = rec.customer_id + elif db.dbms == 'oracle': + for rec in db.query ('SELECT customer_%d_tbl_seq.nextval FROM dual' % company_id): + customer_id = rec[0] + logger.info ('No customer for email %s found, use new customer_id %s' % (address, customer_id)) + if customer_id is not None: + 
logger.info ('Got datasource id %s for %s' % (datasource_id, datasource_description)) + prefix = 'INSERT INTO customer_%d_tbl (customer_id, email, gender, mailtype, timestamp, creation_date, datasource_id' % company_id + values = 'VALUES (:customer_id, :email, 2, 1, sysdate, sysdate, :datasource_id' + data = { + 'customer_id': customer_id, + 'email': address, + 'datasource_id': datasource_id + } + parts = fullname.split () + while len (parts) > 2: + if parts[0] and parts[0][-1] == '.': + parts = parts[1:] + elif parts[1] and parts[1][-1] == '.': + parts = parts[:1] + parts[2:] + else: + temp = [parts[0], ' '.join (parts[1:])] + parts = temp + if len (parts) == 2: + prefix += ', firstname, lastname' + values += ', :firstname, :lastname' + data['firstname'] = parts[0] + data['lastname'] = parts[1] + elif len (parts) == 1: + prefix += ', lastname' + values += ', :lastname' + data['lastname'] = parts[0] + query = prefix + ') ' + values + ')' + logger.info ('Using "%s" with %s to write customer to database' % (query, data)) + try: + db.update (query, data, commit = True) + except Exception as e: + logger.error ('Failed to insert new customer %r: %s' % (address, e)) + customer_id = None + if customer_id is not None: + if new_binding: + db.update ( + 'INSERT INTO customer_%d_binding_tbl ' + ' (customer_id, mailinglist_id, user_type, user_status, user_remark, timestamp, creation_date, mediatype) ' + 'VALUES ' + ' (:customer_id, :mailinglist_id, :user_type, :user_status, :user_remark, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 0)' % company_id, { - 'user_status': UserStatus.WAITCONFIRM.value, - 'user_remark': user_remark, 'customer_id': customer_id, - 'mailinglist_id': mailinglist_id + 'mailinglist_id': mailinglist_id, + 'user_type': 'W', + 'user_status': UserStatus.WAITCONFIRM.value, + 'user_remark': user_remark }, commit = True ) - else: - customer_id = max (custids) - logger.info ('No matching binding found, use cutomer_id %s' % customer_id) - else: - datasource_description = 
'Mailloop #%s' % self.rid - dsid = Datasource () - datasource_id = dsid.get_id (datasource_description, company_id, 4) - if db.dbms in ('mysql', 'mariadb'): - db.update ( - 'INSERT INTO customer_%d_tbl (email, gender, mailtype, timestamp, creation_date, datasource_id) ' - 'VALUES (:email, 2, 1, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, :datasource_id)' - % company_id, - { - 'email': address, - 'datasource_id': datasource_id - }, - commit = True - ) - for rec in db.query ('SELECT customer_id FROM customer_%d_tbl WHERE email = :email' % company_id, {'email': address}): - customer_id = rec.customer_id - elif db.dbms == 'oracle': - for rec in db.query ('SELECT customer_%d_tbl_seq.nextval FROM dual' % company_id): - customer_id = rec[0] - logger.info ('No customer for email %s found, use new customer_id %s' % (address, customer_id)) - if customer_id is not None: - logger.info ('Got datasource id %s for %s' % (datasource_id, datasource_description)) - prefix = 'INSERT INTO customer_%d_tbl (customer_id, email, gender, mailtype, timestamp, creation_date, datasource_id' % company_id - values = 'VALUES (:customer_id, :email, 2, 1, sysdate, sysdate, :datasource_id' - data = { - 'customer_id': customer_id, - 'email': address, - 'datasource_id': datasource_id - } - parts = fullname.split () - while len (parts) > 2: - if parts[0] and parts[0][-1] == '.': - parts = parts[1:] - elif parts[1] and parts[1][-1] == '.': - parts = parts[:1] + parts[2:] - else: - temp = [parts[0], ' '.join (parts[1:])] - parts = temp - if len (parts) == 2: - prefix += ', firstname, lastname' - values += ', :firstname, :lastname' - data['firstname'] = parts[0] - data['lastname'] = parts[1] - elif len (parts) == 1: - prefix += ', lastname' - values += ', :lastname' - data['lastname'] = parts[0] - query = prefix + ') ' + values + ')' - logger.info ('Using "%s" with %s to write customer to database' % (query, data)) - try: - db.update (query, data, commit = True) - except Exception as e: - logger.error ('Failed 
to insert new customer %r: %s' % (address, e)) - customer_id = None - if customer_id is not None: - if new_binding: - db.update ( - 'INSERT INTO customer_%d_binding_tbl ' - ' (customer_id, mailinglist_id, user_type, user_status, user_remark, timestamp, creation_date, mediatype) ' - 'VALUES ' - ' (:customer_id, :mailinglist_id, :user_type, :user_status, :user_remark, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 0)' - % company_id, - { - 'customer_id': customer_id, - 'mailinglist_id': mailinglist_id, - 'user_type': 'W', - 'user_status': UserStatus.WAITCONFIRM.value, - 'user_remark': user_remark - }, - commit = True - ) - logger.info ('Created new binding using') - if send_mail: - formname = None - rdir = None - for rec in db.query ('SELECT formname FROM userform_tbl WHERE form_id = %d AND company_id = %d' % (formular_id, company_id)): - if rec.formname: - formname = rec.formname - for rec in db.query ('SELECT rdir_domain FROM mailinglist_tbl WHERE mailinglist_id = %d' % mailinglist_id): - rdir = rec.rdir_domain - if rdir is None: - for rec in db.query ('SELECT rdir_domain FROM company_tbl WHERE company_id = %d' % company_id): + logger.info ('Created new binding using') + if send_mail: + formname = None + rdir = None + company_token = None + for rec in db.query ('SELECT formname FROM userform_tbl WHERE form_id = %d AND company_id = %d' % (formular_id, company_id)): + if rec.formname: + formname = rec.formname + for rec in db.query ('SELECT rdir_domain FROM mailinglist_tbl WHERE mailinglist_id = %d' % mailinglist_id): + rdir = rec.rdir_domain + for rec in db.query ('SELECT rdir_domain, company_token FROM company_tbl WHERE company_id = %d' % company_id): if rdir is None: rdir = rec.rdir_domain - if not formname is None and not rdir is None: - mailing_id: Optional[int] = None - creation_date: Optional[datetime] = None - for rec in db.queryc ('SELECT mailing_id, creation_date FROM mailing_tbl WHERE company_id = %d AND (deleted = 0 OR deleted IS NULL)' % company_id): - if 
rec.mailing_id is not None: - mailing_id = rec.mailing_id - creation_date = rec.creation_date - break - if mailing_id is not None and creation_date is not None: - try: - uid = UID ( - company_id = company_id, - mailing_id = mailing_id, - customer_id = customer_id - ) - url = f'{rdir}/form.action' - params: Dict[str, str] = { - 'agnCI': str (company_id), - 'agnFN': formname, - 'agnUID': self.uid_handler.create (uid) - } - logger.info (f'Trigger mail using {url} with {params}') - (success, response) = invoke (url, params = params) - logger.info (f'Subscription request returns "{response}" with "{response.text}"') - resp = response.text.strip () - if not success or not isinstance (resp, str) or not resp.lower ().startswith ('ok'): - logger.error (f'Subscribe formular "{url}" returns error "{resp}"') - except Exception as e: - logger.error (f'Failed to trigger [prot] forumlar using "{url}": {e}') + if company_token is None: + company_token = rec.company_token + if formname is not None and rdir is not None: + mailing_id: Optional[int] = None + creation_date: Optional[datetime] = None + for rec in db.queryc ('SELECT mailing_id, creation_date FROM mailing_tbl WHERE company_id = %d AND (deleted = 0 OR deleted IS NULL)' % company_id): + if rec.mailing_id is not None: + mailing_id = rec.mailing_id + creation_date = rec.creation_date + break + if mailing_id is not None and creation_date is not None: + try: + uid = UID ( + company_id = company_id, + mailing_id = mailing_id, + customer_id = customer_id + ) + url = f'{rdir}/form.action' + params: Dict[str, str] = { + 'agnFN': formname, + 'agnUID': self.uid_handler.create (uid) + } + if company_token: + params['agnCTOKEN'] = company_token + else: + params['agnCI'] = str (company_id) + logger.info (f'Trigger mail using {url} with {params}') + (success, response) = invoke (url, params = params) + logger.info (f'Subscription request returns "{response}" with "{response.text}"') + resp = response.text.strip () + if not success or not 
isinstance (resp, str) or not resp.lower ().startswith ('ok'): + logger.error (f'Subscribe formular "{url}" returns error "{resp}"') + except Exception as e: + logger.error (f'Failed to trigger [prot] forumlar using "{url}": {e}') + else: + logger.error ('Failed to find active mailing for company %d' % company_id) else: - logger.error ('Failed to find active mailing for company %d' % company_id) - else: - if not formname: - logger.error ('No formular with id #%d found' % formular_id) - if not rdir: - logger.error ('No rdir domain for company #%d/mailinglist #%d found' % (company_id, mailinglist_id)) + if not formname: + logger.error ('No formular with id #%d found' % formular_id) + if not rdir: + logger.error ('No rdir domain for company #%d/mailinglist #%d found' % (company_id, mailinglist_id)) + self.report['customer_id'] = customer_id def execute_is_systemmail (self) -> bool: if self.parsed_email.unsubscribe: - return True - return self.rule.match_header (self.msg, ['systemmail']) is not None + rc = True + else: + rc = self.rule.match_header (self.msg, ['systemmail']) is not None + self.report['systemmail'] = rc + return rc def execute_filter_or_forward (self, db: DB) -> bool: if self.parsed_email.ignore: action = 'ignore' else: match = self.rule.match_header (self.msg, ['filter']) - if not match is None: + if match is not None: if not match[1].action: - action = 'save' + action = 'filtered' else: action = match[1].action else: - action = 'sent' + action = 'forward' fwd: Optional[List[str]] = None - if action == 'sent': + if action == 'forward': fwd = self.parameter.forward if BAV.has_spamassassin and (self.parameter.spam_forward is not None or self.parameter.spam_required is not None): fwd = self.filter_with_spam_assassin (fwd) self.save_message (action) - if action == 'sent': + if action == 'forward': while BAV.x_agn in self.msg: del self.msg[BAV.x_agn] if ( @@ -935,8 +1030,12 @@ def execute_filter_or_forward (self, db: DB) -> bool: self.cinfo.customer_id > 0 
): self.msg[BAV.x_customer] = str (self.cinfo.customer_id) - if fwd is not None: + if fwd: self.sendmail (self.msg, fwd) + self.report['forward'] = ','.join (fwd) + action = 'sent' + else: + action = 'filtered' if self.parameter.autoresponder: if self.parameter.sender and self.header_from is not None and self.header_from.address: @@ -944,7 +1043,7 @@ def execute_filter_or_forward (self, db: DB) -> bool: auto_responder = Autoresponder (self.parameter.autoresponder, sender) if self.parameter.autoresponder_mailing_id: logger.info ('Trigger autoresponder message for %s' % sender) - auto_responder.trigger_message (db, self.bounce, self.cinfo, self.parameter, self.dryrun) + auto_responder.trigger_message (db, self.report, self.bounce, self.cinfo, self.parameter, self.dryrun) else: logger.warning ('Old autorepsonder without content found') else: @@ -967,13 +1066,14 @@ def execute_filter_or_forward (self, db: DB) -> bool: self.parameter.subscribe.mailinglist_id, self.parameter.subscribe.form_id ) + self.report['action'] = action return True def execute_scan_and_unsubscribe (self, db: DB) -> bool: if self.parsed_email.ignore: action = 'ignore' else: - action = 'unspec' + action = 'filtered' scan = self.rule.scan_message (self.cinfo, self.msg, ['hard', 'soft']) if scan: if scan.entry and scan.entry.action: @@ -994,7 +1094,14 @@ def execute_scan_and_unsubscribe (self, db: DB) -> bool: fd.write ('%s;%s;%s;0;%s;timestamp=%s\tmailloop=%s\tserver=%s\n' % (scan.dsn, licence, scan.minfo.mailing_id, scan.minfo.customer_id, ParseTimestamp ().dump (datetime.now ()), scan.etext, fqdn)) except IOError as e: logger.error ('Unable to write %s %s' % (BAV.ext_bouncelog, e)) + self.report['bounce-dsn'] = scan.dsn + self.report['bounce-mailing-id'] = scan.minfo.mailing_id + self.report['bounce-customer-id'] = scan.minfo.customer_id + if scan.etext: + self.report['bounce-error-text'] = scan.etext + action = 'bounce' self.save_message (action) + self.report['action'] = action return True class 
BAVD (Runtime): @@ -1043,6 +1150,7 @@ def bav_debug (self) -> None: else: print ('--> Filter or forward') ok = bav.execute_filter_or_forward (db) + bav.report.write (ok) if ok: print ('OK') else: @@ -1105,6 +1213,7 @@ def execute (self, size: int) -> None: else: with log ('filter'): ok = bav.execute_filter_or_forward (db) + bav.report.write (ok) except IOError as e: logger.error ('Failed to open %s: %r' % (path, e.args)) except Exception as e: diff --git a/backend/src/script/process/bounce-rules.sh b/backend/src/script/process/bounce-rules.sh index 2a4b90076..47bb22c49 100644 --- a/backend/src/script/process/bounce-rules.sh +++ b/backend/src/script/process/bounce-rules.sh @@ -57,28 +57,44 @@ class Main (CLI): 'section', 'pattern' ) + ote_parser = Lineparser ( + lambda l: l.split (';', 2), + 'name', + 'domains', + 'relays' + ) # + pattern_reset = re.compile ('^## *RESET$') pattern_parameter = re.compile ('^## *PARAMETER: *(.*)$') pattern_bounce_rules = re.compile ('^## *BOUNCE-RULES$') pattern_bav_rules = re.compile ('^## *BAV-RULES$') + pattern_ote = re.compile ('^## *OTE$') section: Literal[ None, 'bounce', - 'bav' + 'bav', + 'ote' ] = None parameter: Optional[Parameter] = None bounce_rules: List[Line] = [] bav_rules: List[Line] = [] + ote_rules: List[Line] = [] for (lineno, line) in enumerate ((_l.strip () for _l in fd), start = 1): if (match := pattern_parameter.match (line)) is not None: parameter = Parameter (match.group (1)) continue + if pattern_reset.match (line) is not None: + section = None + continue if pattern_bounce_rules.match (line) is not None: section = 'bounce' continue if pattern_bav_rules.match (line) is not None: section = 'bav' continue + if pattern_ote.match (line) is not None: + section = 'ote' + continue if section is None or not line or line.startswith ('#'): continue # @@ -87,6 +103,8 @@ class Main (CLI): bounce_rules.append (bounce_parser (line)) elif section == 'bav': bav_rules.append (bav_parser (line)) + elif section == 'ote': + 
ote_rules.append (ote_parser (line)) except error as e: print (f'Failed to parse {self.caller[0]}:{lineno}:{line} due to: {e}') ok = False @@ -98,6 +116,8 @@ class Main (CLI): ok = False if not self.update_bav_rules (db, bav_rules): ok = False + if not self.update_ote_rules (db, ote_rules): + ok = False db.sync (not self.dryrun and ok) return ok @@ -247,7 +267,7 @@ class Main (CLI): ) new += 1 if self.dryrun or new or disabled or updated: - print (f'Added {new} rules, disable {disabled} rules, update {updated} rules') + print (f'Bounce rules: added {new} rules, disable {disabled} rules, update {updated} rules') return True def invalid_bounce_rule_pattern (self, pattern: str) -> bool: @@ -257,7 +277,87 @@ class Main (CLI): return True else: return False - + + def update_ote_rules (self, db: DB, rules: List[Line]) -> bool: + rc = True + if db.exists (Bounce.ote_table): + def mkdesc (name: str) -> str: + return f'predefined entry: {name}' + def norm (value: str) -> str: + return value.strip () if value else '' + source: Dict[str, Line] = {} + for rule in rules: + if not rule.name: + print ('Empty rule name not allowed') + rc = False + elif rule.name in source: + print (f'Duplicate entry from source: "{rule.name}" found') + rc = False + else: + source[mkdesc (rule.name)] = rule + if not rc: + return False + # + updated = 0 + removed = 0 + added = 0 + for row in db.streamc ( + 'SELECT description, domains, relays ' + f'FROM {Bounce.ote_table} ' + 'WHERE company_id = 0' + ).filter (lambda r: bool (r.description) and r.description in source): + rule = source.pop (row.description) + domains = norm (row.domains) + relays = norm (row.relays) + if domains != rule.domains or relays != rule.relays: + if rule.domains or rule.relays: + if self.dryrun: + print (f'Would update "{row.description}" with: {rule}') + updated += 1 + else: + updated += db.update ( + f'UPDATE {Bounce.ote_table} ' + 'SET domains = :domains, relays = :relays, change_date = CURRENT_TIMESTAMP ' + 'WHERE 
description = :description AND company_id = 0', + { + 'domains': rule.domains if rule.domains else None, + 'relays': rule.relays if rule.relays else None, + 'description': row.description + } + ) + else: + if self.dryrun: + print (f'Would remove "{row.description}"') + removed += 1 + else: + removed += db.update ( + f'DELETE FROM {Bounce.ote_table} ' + 'WHERE description = :description AND company_id = 0', + { + 'description': row.description + } + ) + for (description, rule) in source.items (): + if rule.domains or rule.relays: + if self.dryrun: + print (f'Would add "{description}" with: {rule}') + added += 1 + else: + added += db.update ( + f'INSERT INTO {Bounce.ote_table} ' + ' (company_id, domains, relays, active, description, creation_date, change_date) ' + 'VALUES ' + ' (0, :domains, :relays, 1, :description, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)', + { + 'domains': rule.domains if rule.domains else None, + 'relays': rule.relays if rule.relays else None, + 'description': description + } + ) + if self.dryrun or updated or removed or added: + print (f'OTE: added {added} rules, removed {removed} rules, update {updated} rules') + return rc + def update_bav_rules (self, db: DB, rules: List[Line]) -> bool: rc = True if db.exists (Bounce.bounce_rule_table): @@ -273,7 +373,7 @@ class Main (CLI): if original != updated: if self.dryrun: diff = '\n'.join ( - difflib.context_diff ( + difflib.unified_diff ( original.split ('\n'), updated.split ('\n'), fromfile = 'original', @@ -300,9 +400,7 @@ fi exit $rc #STOP # -# ## PARAMETER: convert-bounce-count="10", convert-bounce-duration="30", last-click="30", last-open="30", max-age-create="-", max-age-change="-", fade-out="14", expire="1100", threshold="5" -# ## BOUNCE-RULES 50x;;50;400; 51x;;51;410; @@ -357,6 +455,8 @@ EMM-8027;;500;511;stat="no such recipient here|recipient unknown|invalid recipie EMM-8030;;500;511;stat="Mailaddress is administratively disabled" EMM-8029;;500;511;stat="mailbox for .* does not exist" 
EMM-8135;;474;513;stat="TLS is required, but was not offered" +EMM-8135-4;;4;513;port="465" +EMM-8135-5;;5;513;port="465" EMM-8617;;500;511;stat="No such local user", relay=".firemail.de." EMM-8629;;550;511;stat="recipient rejected" EMM-8634;;571;511;stat="user unknown|no such user", relay=".vol.at.|.yandex.ru.|.antispameurope.com.|.uni-wuerzburg.de.|.bund.de." @@ -460,3 +560,4 @@ Sourceforge Bugreport #2620217;hard;^ Unrouteable address ;soft;552 RCPT TO:<.*> Mailbox disk quota ;soft;Quota exceeded. The recipients mailbox is full. ;soft;^User mailbox exceeds allowed size: .* +## RESET diff --git a/backend/src/script/process/direct-path3.py b/backend/src/script/process/direct-path3.py index 1c172ec70..2321bce63 100644 --- a/backend/src/script/process/direct-path3.py +++ b/backend/src/script/process/direct-path3.py @@ -78,25 +78,36 @@ def doit (self, basename: str) -> None: queue = self.__next_queue () if self.mta (src, target_directory = queue, flush_count = '2'): logger.info ('Unpacked %s in %s' % (src, queue)) - try: - target = ArchiveDirectory.make (self.archive) - except error as e: - logger.error ('Failed to setup archive directory %s: %s' % (self.archive, e)) + if self.archive != os.devnull: + try: + target = ArchiveDirectory.make (self.archive) + except error as e: + logger.error ('Failed to setup archive directory %s: %s' % (self.archive, e)) + target = self.archive + else: target = self.archive else: logger.error ('Failed to unpack %s in %s' % (src, queue)) target = self.recover else: logger.error ('Do not process %s as control file(s) is/are missing' % src) - dst = os.path.join (target, os.path.basename (src)) - try: - shutil.move (src, dst) - except (shutil.Error, IOError, OSError) as e: - logger.error ('Failed to move %s to %s: %s' % (src, dst, str (e))) + if target != os.devnull: + dst = os.path.join (target, os.path.basename (src)) + try: + shutil.move (src, dst) + except (shutil.Error, IOError, OSError) as e: + logger.error ('Failed to move %s to %s: 
%s' % (src, dst, str (e))) + try: + os.unlink (src) + logger.info (f'Removed file {src} as moving to destination {dst} failed') + except OSError as e: + logger.error ('Failed to remove file %s: %s' % (src, str (e))) + else: try: os.unlink (src) + logger.info (f'Removed file {src} as target is {target}') except OSError as e: - logger.error ('Failed to remove file %s: %s' % (src, str (e))) + logger.error (f'Failed to remove file {src} for target {target}: {e}') else: logger.debug ('Skip requested file %s which is already processed' % src) for path in stamp, final: diff --git a/backend/src/script/process/generate3.py b/backend/src/script/process/generate3.py index 10a603a61..7fa0ebe82 100644 --- a/backend/src/script/process/generate3.py +++ b/backend/src/script/process/generate3.py @@ -733,7 +733,7 @@ def collect_entries_for_sending (self) -> None: if self.invalidate_maildrop_entry (row.status_id, old_status = 0): logger.info ('%s: disabled' % msg) else: - logger.error ('%s: failed to disable' % msg); + logger.error ('%s: failed to disable' % msg) self.db.sync () #}}} class ScheduleGenerate (Schedule): #{{{ diff --git a/backend/src/script/process/recovery3.py b/backend/src/script/process/recovery3.py index 2fe260908..687c63041 100644 --- a/backend/src/script/process/recovery3.py +++ b/backend/src/script/process/recovery3.py @@ -17,7 +17,7 @@ from dataclasses import dataclass from typing import Dict, List, NamedTuple, Pattern, Set from agn3.db import DB -from agn3.definitions import base, fqdn, user +from agn3.definitions import base, fqdn, user, ams from agn3.email import EMail from agn3.emm.config import EMMConfig, Responsibility from agn3.exceptions import error @@ -67,7 +67,7 @@ def __parse_xml (self, path: str) -> None: #{{{ mode = 1 elif mode == 1: mtch = pattern.search (line) - if not mtch is None: + if mtch is not None: current.add (int (mtch.groups ()[0])) self.seen.update (current) except IOError as e: @@ -75,7 +75,7 @@ def __parse_xml (self, path: str) -> 
None: #{{{ #}}} def __collect (self, pattern: Pattern[str], path: str, remove: bool) -> None: #{{{ files = os.listdir (path) - for fname in [_f for _f in files if not pattern.match (_f) is None]: + for fname in [_f for _f in files if pattern.match (_f) is not None]: fpath = os.path.join (path, fname) if remove: try: @@ -94,15 +94,16 @@ def __collect_track (self, path: str) -> None: #{{{ #}}} def collect_seen (self) -> None: #{{{ self.seen.clear () - pattern = re.compile ('^AgnMail(-[0-9]+)?=D[0-9]{14}=%d=%d=[^=]+=liaMngA\\.(stamp|final|xml\\.gz)$' % (self.company_id, self.mailing_id)) - self.__collect (pattern, self.meta, True) - for sdir in self.check: - spath = os.path.join (self.archive, sdir) - if os.path.isdir (spath): - self.__collect (pattern, spath, False) - track = os.path.join (self.track, str (self.company_id), str (self.mailing_id)) - if os.path.isdir (track): - self.__collect_track (track) + if self.status_field not in ('A', 'T'): + pattern = re.compile ('^AgnMail(-[0-9]+)?=D[0-9]{14}=%d=%d=[^=]+=liaMngA\\.(stamp|final|xml\\.gz)$' % (self.company_id, self.mailing_id)) + self.__collect (pattern, self.meta, True) + for sdir in self.check: + spath = os.path.join (self.archive, sdir) + if os.path.isdir (spath): + self.__collect (pattern, spath, False) + track = os.path.join (self.track, str (self.company_id), str (self.mailing_id)) + if os.path.isdir (track): + self.__collect_track (track) #}}} def create_filelist (self) -> None: #{{{ if self.seen: @@ -152,13 +153,21 @@ def cleanup (self, success: bool) -> None: def executor (self) -> bool: log.set_loglevel ('debug') try: - with Lock (): - with log ('collect'): - self.collect_mailings () - with log ('recover'): - self.recover_mailings () - with log ('report'): - self.report_mailings () + if ams and self.startup_delay > 0: + with log ('delay'): + delay = self.startup_delay + while self.running and delay > 0: + delay -= 1 + time.sleep (1) + + if self.running: + with Lock (): + with log ('collect'): + 
self.collect_mailings () + with log ('recover'): + self.recover_mailings () + with log ('report'): + self.report_mailings () return True except error as e: logger.exception ('Failed recovery: %s' % e) @@ -235,18 +244,35 @@ def collect_mailings (self) -> None: #{{{ self.db.sync () # query = ( - 'SELECT status_id, mailing_id, company_id, status_field, senddate, processed_by ' + 'SELECT status_id, mailing_id, company_id, status_field, genchange, senddate, processed_by ' 'FROM maildrop_status_tbl ' - 'WHERE genstatus IN (1, 2) AND genchange > :expire AND genchange < CURRENT_TIMESTAMP AND status_field = \'W\'' + 'WHERE genstatus IN (1, 2) AND genchange > :expire AND genchange < CURRENT_TIMESTAMP AND status_field IN (\'A\', \'T\', \'W\')' ) + limit_restart_test_and_admin_mailings = datetime.now () - timedelta (hours = 1) for row in (self.db.streamc (query, {'expire': expire}) .filter (lambda r: r.company_id in self.responsibilities and (not r.processed_by or r.processed_by == fqdn)) .filter (lambda r: self.restrict_to_mailings is None or r.mailing_id in self.restrict_to_mailings) .filter (lambda r: self.__mailing_valid (r.mailing_id)) ): - check = self.__make_range (row.senddate, now) - self.mailings.append (Mailing (row.status_id, row.status_field, row.mailing_id, row.company_id, check)) - logger.info ('Mark mailing %d (%s) for recovery' % (row.mailing_id, self.__mailing_name (row.mailing_id))) + mailing_name = self.__mailing_name (row.mailing_id) + if row.status_field == 'W' or (row.genstatus == 1 and row.genchange is not None and row.genchange > limit_restart_test_and_admin_mailings): + check = self.__make_range (row.senddate, now) + self.mailings.append (Mailing (row.status_id, row.status_field, row.mailing_id, row.company_id, check)) + logger.info ('Mark mailing %d (%s) for recovery' % (row.mailing_id, mailing_name)) + else: + if self.db.update ( + 'UPDATE maildrop_status_tbl ' + 'SET genstatus = 4 ' + 'WHERE status_id = :status_id', + { + 'status_id': row.status_id 
+ } + ) > 0: + logger.info (f'Mark {row.status_field}-mailing {row.mailing_id} ({mailing_name}) as finished in an unknown state') + else: + logger.warning (f'Failed to mark {row.status_id} for finished') + if not self.dryrun: + self.db.sync () self.mailings.sort (key = lambda m: m.status_id) logger.info ('Found %d mailing(s) to recover' % len (self.mailings)) #}}} @@ -268,7 +294,7 @@ def recover_mailings (self) -> None: #{{{ m.create_filelist () count = 0 for (total_mails, ) in self.db.query ('SELECT total_mails FROM mailing_backend_log_tbl WHERE status_id = :sid', {'sid': m.status_id}): - if not total_mails is None and total_mails > count: + if total_mails is not None and total_mails > count: count = total_mails m.set_generated_count (count) self.db.update ('DELETE FROM mailing_backend_log_tbl WHERE status_id = :sid', {'sid': m.status_id}) @@ -309,7 +335,7 @@ def recover_mailings (self) -> None: #{{{ if m.last: current = 0 for (currentMails, ) in self.db.query ('SELECT current_mails FROM mailing_backend_log_tbl WHERE status_id = :sid', {'sid': m.status_id}): - if not currentMails is None: + if currentMails is not None: current = currentMails if current != m.current: logger.debug (f'Mailing {m.mailing_id} has created {current:,d} vs. 
{m.current:,d} when last checked') @@ -341,7 +367,7 @@ def recover_mailings (self) -> None: #{{{ if not m.active: count = 0 for (total_mails, ) in self.db.query ('SELECT total_mails FROM mailing_backend_log_tbl WHERE status_id = :sid', {'sid': m.status_id}): - if not total_mails is None: + if total_mails is not None: count = total_mails count += len (m.seen) self.db.update ('UPDATE mailing_backend_log_tbl SET total_mails = :cnt, current_mails = :cnt WHERE status_id = :sid', {'sid': m.status_id, 'cnt': count}) diff --git a/backend/src/script/process/slrtscn3.py b/backend/src/script/process/slrtscn3.py index ea1048ce3..9625125e0 100644 --- a/backend/src/script/process/slrtscn3.py +++ b/backend/src/script/process/slrtscn3.py @@ -208,11 +208,13 @@ def write_bounce (self, recipient: str ) -> None: try: + recipient = recipient.strip ('<>') info = [ f'timestamp={timestamp.year:04d}-{timestamp.month:02d}-{timestamp.day:02d} {timestamp.hour:02d}:{timestamp.minute:02d}:{timestamp.second:02d}', f'stat={reason}', f'queue_id={queue_id}', f'relay={relay}', + f'to={recipient}', f'server={fqdn}' ] self.plugin ().add_bounce_info (dsn, licence_id, mailing_id, customer_id, info) @@ -226,7 +228,7 @@ def write_bounce (self, fqdn, dsn, relay, - recipient.strip ('<>').split ('@')[-1], + recipient.split ('@')[-1], (f';{reason}' if not dsn.startswith ('2') else '') )) except IOError as e: diff --git a/backend/src/script/process/trigger3.py b/backend/src/script/process/trigger3.py index 9dce886ae..8a94a1ce8 100644 --- a/backend/src/script/process/trigger3.py +++ b/backend/src/script/process/trigger3.py @@ -30,7 +30,7 @@ logger = logging.getLogger (__name__) # class MergerProxy (Protocol): - class Merger: + class Merger (Protocol): @staticmethod def remote_control (command: str, parameter: str) -> str: ... 
diff --git a/backend/src/script/process/update3.py b/backend/src/script/process/update3.py index 1e6bfcb66..808166f0d 100644 --- a/backend/src/script/process/update3.py +++ b/backend/src/script/process/update3.py @@ -17,6 +17,7 @@ from functools import partial from datetime import datetime, timedelta from dataclasses import dataclass, field +from urllib.parse import unquote from types import TracebackType from typing import Any, Callable, Final, Optional, Union from typing import DefaultDict, Dict, Iterator, List, NamedTuple, Pattern, Set, TextIO, Tuple, Type @@ -40,7 +41,7 @@ from agn3.runtime import Runtime from agn3.stream import Stream from agn3.template import Template -from agn3.tools import atob, atoi, listsplit +from agn3.tools import abstract, atob, atoi, listsplit from agn3.tracker import Key, Tracker # logger = logging.getLogger (__name__) @@ -114,7 +115,29 @@ def __init__ (self, logpath: str, name: str) -> None: self.logpath = logpath self.name = name self.target = os.path.join (self.logpath, self.name) - create_path (self.target) + if not create_path (self.target): + self.respool () + + aborted = re.compile ('^([0-9]+-[0-9]+)-[0-9.]+$') + def respool (self) -> None: + now = time.time () + for filename in sorted (os.listdir (self.target)): + if (mtch := self.aborted.match (filename)) is not None: + old = os.path.join (self.target, filename) + new = os.path.join (self.target, mtch.group (1)) + try: + if os.path.isfile (new): + os.remove (old) + logger.info (f'{self.name}: removed duplicate spool file {old} due to existing {new}') + else: + st = os.stat (old) + if now < st.st_mtime + 6 * 60 * 60: + os.rename (old, new) + logger.info (f'{self.name}: respool {old} as {new}') + else: + logger.warning (f'{self.name}: do not respool {old} as {new} due to outdated file') + except OSError as e: + logger.warning (f'{self.name}: failed to respool {old} as {new}: {e}') def unpack (self, s: str) -> Tuple[int, int]: (nr, seq) = (int (_v) for _v in s.split ('-')) @@ 
-191,6 +214,7 @@ class Update: #{{{ ] name = 'update' path = '/dev/null' + known_mediatypes = Stream (MediaType.__members__.values ()).map (lambda m: m.value).set () timestamp_parser = ParseTimestamp () def __init__ (self) -> None: directory = os.path.dirname (self.path) @@ -260,12 +284,16 @@ def update_prepare (self) -> bool: return True def update_finished (self) -> bool: return True + + def update_start (self, db: DB) -> bool: - raise error ('Need to overwrite update_start in your subclass') + abstract () + def update_end (self, db: DB) -> bool: - raise error ('Need to overwrite update_end in your subclass') + abstract () + def update_line (self, db: DB, line: str) -> bool: - raise error ('Need to overwrite update_line in your subclass') + abstract () def execute (self, is_active: Callable[[], bool], delay: Optional[int]) -> None: self.setup () @@ -406,20 +434,33 @@ class UpdateBounce (Update): #{{{ __slots__ = [ 'mailing_map', 'igcount', 'sucount', 'sbcount', 'hbcount', 'dupcount', 'blcount', 'rvcount', 'ccount', - 'translate', 'delayed_processing', 'cache', 'succeeded', + 'translate', + 'delayed_processing', 'cache', 'succeeded', 'has_mailtrack', 'has_mailtrack_last_read', 'sys_encrypted_sending_enabled', 'bounce_mark_duplicate' ] name = 'bounce' path = os.path.join (base, 'log', 'extbounce.log') class Info: #{{{ + relay_pattern = re.compile ('^([^[]*)(\\[([^]]*)\\])?(:([0-9]+))?$') def __init__ (self, info: str) -> None: self.info = info self.map: Dict[str, str] = {} for elem in info.split ('\t'): parts = elem.split ('=', 1) if len (parts) == 2: - self.map[parts[0]] = parts[1] + if parts[0] == 'relay': + m = self.relay_pattern.match (parts[1]) + if m is not None: + (relay, _, ip, _, port) = m.groups () + for (name, value) in ('ip', ip), ('port', port): + if value: + self.map[name] = value + else: + relay = (parts[1].split (':')[0]).split ('[')[0] + self.map[parts[0]] = relay + else: + self.map[parts[0]] = parts[1] elif len (parts) == 1: self.map['stat'] = 
elem @@ -448,7 +489,7 @@ class Translate: #{{{ Detail.SoftbounceMailbox: [421, 422, 520, 521, 522, 523, 524], Detail.HardbounceOther: [531], Detail.HardbounceReceiver: [511, 513, 516, 517, 572], - Detail.HardbounceSystem: [512, 518] + Detail.HardbounceSystem: [512, 518] } class Pattern: control_pattern = re.compile ('^/(.*)/([a-z]*)$') @@ -515,11 +556,7 @@ def __init__ (self, data: str, debug: bool) -> None: def match (self, infos: UpdateBounce.Info) -> bool: for (key, pattern) in [(_k.lower (), _v) for (_k, _v) in self.checks.items ()]: value = infos[key] - if value is None: - return False - if key == 'relay': - value = (value.split (':')[0]).split ('[')[0] - if pattern.search (value) is None: + if value is None or pattern.search (value) is None: return False return True @@ -681,7 +718,7 @@ def __init__ (self) -> None: self.has_mailtrack: Dict[int, bool] = {} self.has_mailtrack_last_read = 0 self.sys_encrypted_sending_enabled: Cache[int, bool] = Cache (timeout = '30m') - self.bounce_mark_duplicate: Dict[int, bool] = {} + self.bounce_mark_duplicate = EMMCompany.Enable () def done (self) -> None: self.delayed_processing.done () @@ -798,10 +835,10 @@ def update_start (self, db: DB) -> bool: self.blcount = 0 self.rvcount = 0 self.ccount = 0 - self.bounce_mark_duplicate = (Stream (EMMCompany (db = db, keys = ['bounce-mark-duplicate']).scan_all ()) - .map (lambda v: (v.company_id, atob (v.value))) - .dict () - ) + with EMMCompany (db = db, keys = [ + 'bounce-mark-duplicate', + ]) as emmcompany: + self.bounce_mark_duplicate = emmcompany.enabled ('bounce-mark-duplicate', default = True) self.succeeded.clear () self.translate.clear () self.translate.setup (db) @@ -817,7 +854,13 @@ def update_end (self, db: DB) -> bool: if self.succeeded: logger.info ('Add {mails:,d} mails to {mailings:d} mailings'.format (mails = cast (int, sum (self.succeeded.values ())), mailings = len (self.succeeded))) for mailing_id in sorted (self.succeeded): - db.update ('UPDATE mailing_tbl SET 
delivered = delivered + :success WHERE mailing_id = :mailing_id', {'success': self.succeeded[mailing_id], 'mailing_id': mailing_id}) + db.update ( + 'UPDATE mailing_tbl SET delivered = COALESCE(delivered, 0) + :success WHERE mailing_id = :mailing_id', + { + 'success': self.succeeded[mailing_id], + 'mailing_id': mailing_id + } + ) db.sync () logger.info ('Found %d hardbounces (%d duplicates), %d softbounces (%d written), %d successes, %d blocklisted, %d revoked, %d ignored in %d lines' % (self.hbcount, self.dupcount, self.sbcount, (self.sbcount - self.ccount), self.sucount, self.blcount, self.rvcount, self.igcount, self.lineno)) if self.delayed_processing: @@ -868,9 +911,6 @@ def update_line (self, db: DB, line: str) -> bool: return True breakdown.timestamp = datetime.now () # - if breakdown.detail == Detail.Success or breakdown.detail in Detail.Hardbounces: - pass - # if breakdown.detail == Detail.Success: self.delayed_processing.drop (record.mailing_id, record.media, record.customer_id) self.succeeded[record.mailing_id] += 1 @@ -978,7 +1018,7 @@ def update_line (self, db: DB, line: str) -> bool: and record.media == MediaType.EMAIL.value and - self.bounce_mark_duplicate.get (company_id, self.bounce_mark_duplicate.get (0, True)) + self.bounce_mark_duplicate (company_id) ): rq = db.querys ( 'SELECT email ' @@ -1020,9 +1060,6 @@ def update_line (self, db: DB, line: str) -> bool: customer_ids.append (record.customer_id) self.dupcount += 1 db.sync () - # - for customer_id in customer_ids: - pass except error as e: logger.error ('Unable to unsubscribe %r for company %d from database using %s: %s' % (data, company_id, query, e)) rc = False @@ -1047,7 +1084,6 @@ class UpdateAccount (Update): #{{{ track_path: Final[str] = os.path.join (base, 'var', 'run', 'update-account.track') track_section_mailing: Final[str] = 'mailing' track_section_status: Final[str] = 'status' - known_mediatypes = Stream (MediaType.__members__.values ()).map (lambda m: m.value).set () class 
Mailcheck: __slots__ = ['workstatus', 'lastcheck', 'total', 'info_sent'] persist_path: Final[str] = os.path.join (base, 'var', 'run', 'mailcheck.persist') @@ -1091,10 +1127,11 @@ def check_workstatus (self, db: DB, mailing_id: int) -> bool: count = db.update ( 'UPDATE mailing_tbl ' 'SET work_status = :work_status_sending ' - 'WHERE mailing_id = :mailing_id AND (work_status IS NULL OR work_status = :work_status_finished)', + 'WHERE mailing_id = :mailing_id AND (work_status IS NULL OR work_status IN (:work_status_finished, :work_status_edit))', { 'work_status_sending': WorkStatus.Sending.value, 'work_status_finished': WorkStatus.Finished.value, + 'work_status_edit': WorkStatus.Edit.value, 'mailing_id': mailing_id }, commit = True @@ -1102,15 +1139,29 @@ def check_workstatus (self, db: DB, mailing_id: int) -> bool: if count > 0: logger.info (f'{mailing_id}: updated work status to "{WorkStatus.Sending.value}"') return True + # + rq = db.querys ( + 'SELECT work_status, deleted ' + 'FROM mailing_tbl ' + 'WHERE mailing_id = :mailing_id', + { + 'mailing_id': mailing_id + } + ) + if rq is None: + logger.warning (f'{mailing_id}: no mailing found to update workstatus') + elif rq.work_status and rq.work_status not in (WorkStatus.Sending.value, WorkStatus.Sent.value, WorkStatus.Cancel.value): + deleted = ' [deleted]' if rq.deleted else '' + logger.warning (f'{mailing_id}{deleted}: do not change workstatus from "{rq.work_status}" to "{WorkStatus.Sending.value}"') else: - logger.debug (f'{mailing_id}: no need to update workstatus') + logger.debug (f'{mailing_id}: no need to update workstatus from {rq.work_status} to "{WorkStatus.Sending.value}"') return False class Mailing (NamedTuple): mailing_id: int company_id: int statmail: Optional[str] - + def update (self) -> None: interval = unit.parse (syscfg.get (f'{program}:mailcheck-interval', '1m'), default = 60) mailcheck_limit = unit.parse (syscfg.get (f'{program}:mailcheck-limit', '7d'), default = 7 * 24 * 60 * 60) @@ -1131,12 
+1182,13 @@ def update (self) -> None: 'WHERE mdrop.status_field = :status_field AND ' ' mdrop.senddate >= :limit AND ' ' mdrop.genstatus = 3 AND ' - ' (mt.work_status IS NULL OR mt.work_status = :work_status_finished) AND ' + ' (mt.work_status IS NULL OR mt.work_status IN (:work_status_finished, :work_status_edit)) AND ' ' mt.deleted = 0', { 'status_field': 'W', 'limit': limit, - 'work_status_finished': WorkStatus.Finished.value + 'work_status_finished': WorkStatus.Finished.value, + 'work_status_edit': WorkStatus.Edit.value } ) .filter (lambda r: r.company_id in responsibility) @@ -1252,7 +1304,6 @@ def update (self) -> None: commit = True ) logger.info (f'{mailing.mailing_id}: set work status to sent') - pass with Ignore (KeyError): del self.total[mailing.mailing_id] with Ignore (KeyError): @@ -1350,8 +1401,8 @@ def __init__ (self) -> None: self.control_parser = Tokenparser ( Field ('licence_id', int, source = 'licence'), Field ('owner', int), - Field ('bcc_count', int, optional = True, default = int, source = 'bcc-count'), - Field ('bcc_bytes', int, optional = True, default = int, source = 'bcc-bytes') + Field ('bcc_count', int, optional = True, default = lambda n: 0, source = 'bcc-count'), + Field ('bcc_bytes', int, optional = True, default = lambda n: 0, source = 'bcc-bytes') ) self.data_parser = Tokenparser ( Field ('company_id', int, source = 'company'), @@ -1363,11 +1414,11 @@ def __init__ (self) -> None: Field ('mailtype', int, source = 'subtype'), Field ('no_of_mailings', int, source = 'count'), Field ('no_of_bytes', int, source = 'bytes'), - Field ('skip', int, optional = True, default = int), - Field ('chunks', int, optional = True, default = lambda: 1), + Field ('skip', int, optional = True, default = lambda n: 0), + Field ('chunks', int, optional = True, default = lambda n: 1), Field ('blocknr', int, source = 'block'), 'mailer', - Field ('timestamp', ParseTimestamp (), optional = True, default = lambda: datetime.now ()) + Field ('timestamp', 
ParseTimestamp (), optional = True, default = lambda n: datetime.now ()) ) def setup (self) -> None: @@ -1565,7 +1616,7 @@ class UpdateMailtrack (Update): #{{{ Field ('company_id', int), Field ('mailing_id', int), Field ('maildrop_status_id', int), - Field ('customer_ids', lambda n: [int (_n) for _n in n.split (',') if _n]) + 'customers' ) @dataclass class CompanyCounter: @@ -1579,9 +1630,9 @@ def __init__ (self) -> None: self.count = 0 self.insert_statement = ( 'INSERT INTO {table} ' - ' (company_id, mailing_id, maildrop_status_id, customer_id, timestamp) ' + ' (company_id, mailing_id, maildrop_status_id, customer_id, mediatype, timestamp) ' 'VALUES ' - ' (:company_id, :mailing_id, :maildrop_status_id, :customer_id, :timestamp)' + ' (:company_id, :mailing_id, :maildrop_status_id, :customer_id, :mediatype, :timestamp)' .format (table = self.mailtrack_process_table) ) self.max_count = 0 @@ -1597,6 +1648,7 @@ def __init__ (self) -> None: ' mailing_id number,\n' ' maildrop_status_id number,\n' ' customer_id number,\n' + ' mediatype number,\n' ' timestamp date\n' '){tablespace}' .format (table = self.mailtrack_process_table, tablespace = tablespace_expr) @@ -1606,6 +1658,7 @@ def __init__ (self) -> None: ' mailing_id int(11),\n' ' maildrop_status_id int(11),\n' ' customer_id integer unsigned,\n' + ' mediatype integer unsigned,\n' ' timestamp timestamp\n' ')' .format (table = self.mailtrack_process_table) @@ -1628,6 +1681,13 @@ def __init__ (self) -> None: .format (prefix = mailtrack_index_prefix, id = index_id, column = index_column, table = self.mailtrack_process_table) ) )) + else: + if 'mediatype' not in {_f.name for _f in db.layout (self.mailtrack_process_table, normalize = True)}: + db.execute (db.qselect ( + oracle = 'ALTER TABLE {table} ADD mediatype number'.format (table = self.mailtrack_process_table), + mysql = 'ALTER TABLE {table} ADD mediatype INTEGER UNSIGNED'.format (table = self.mailtrack_process_table) + )) + logger.info ('added missing column 
"mediatype" to {table}'.format (table = self.mailtrack_process_table)) def update_start (self, db: DB) -> bool: db.execute ('TRUNCATE TABLE %s' % self.mailtrack_process_table) @@ -1706,22 +1766,32 @@ def update_line (self, db: DB, line: str) -> bool: record = UpdateMailtrack.line_parser (line) if record.licence_id != licence: logger.debug (f'{record.licence_id}: ignore foreign licence id (own is {licence})') - elif record.customer_ids: + elif record.customers: data = { 'timestamp': record.timestamp, 'company_id': record.company_id, 'mailing_id': record.mailing_id, 'maildrop_status_id': record.maildrop_status_id } - for customer_id in record.customer_ids: + customer_id: int + mediatype: Optional[int] + count = 0 + for entry in record.customers.split (','): + try: + (customer_id, mediatype) = (int (_e) for _e in entry.split ('/', 1)) + except ValueError: + customer_id = int (entry) + mediatype = -1 data['customer_id'] = customer_id + data['mediatype'] = mediatype if mediatype in self.known_mediatypes else None db.update (self.insert_statement, data) + count += 1 self.count += 1 if self.count % 10000 == 0: db.sync () logger.info (f'now at #{self.count:,d}') company = self.companies[record.company_id] - company.count += len (record.customer_ids) + company.count += count company.mailings.add (record.mailing_id) except Exception as e: logger.warning (f'{line}: invalid line: {e}') @@ -1737,8 +1807,8 @@ def __update_mailtracking (self, db: DB, company_id: int, counter: UpdateMailtra with db.request () as cursor: count = cursor.update ( 'INSERT INTO {mailtrack_table} ' - ' (mailing_id, maildrop_status_id, customer_id, timestamp) ' - 'SELECT mailing_id, maildrop_status_id, customer_id, timestamp ' + ' (mailing_id, maildrop_status_id, customer_id, mediatype, timestamp) ' + 'SELECT mailing_id, maildrop_status_id, customer_id, mediatype, timestamp ' 'FROM {table} WHERE company_id = :company_id' .format (mailtrack_table = mailtrack_table, table = self.mailtrack_process_table), { 
@@ -1921,6 +1991,106 @@ def update_line (self, db: DB, line: str) -> bool: else: return True #}}} +class UpdateMailloop (Update): #{{{ + __slots__ = [] + name = 'mailloop' + path = os.path.join (base, 'log', 'mailloop.log') + mailloop_log_table: Final[str] = 'mailloop_log_tbl' + line_parser = Lineparser ( + lambda a: a.split (';', 6), + Field ('licence_id', int), + Field ('rid', lambda s: {'0': 'internal'}.get (s, s)), + Field ('timestamp', Update.timestamp_parser), + 'action', + Field ('status', lambda s: s == '+'), + Field ('info', lambda s: (Stream (s.split ('\t')) + .map (lambda e: e.split ('=', 1)) + .filter (lambda kv: len (kv) == 2) + .map (lambda kv: (kv[0], unquote (kv[1], errors = 'backslashreplace'))) + .dict () + )) + ) + def update_start (self, db: DB) -> bool: + return True + + def update_end (self, db: DB) -> bool: + return True + + def update_line (self, db: DB, line: str) -> bool: + try: + record = UpdateMailloop.line_parser (line) + if record.licence_id != licence: + logger.debug (f'{record.licence_id}: ignore foreign licence id (own is {licence})') + else: + def iget (key: str) -> Optional[int]: + try: + return int (record.info[key]) + except: + return None + remark: List[str] = [] + for (key, display) in ( + ('sent', 'mail sent to {value}'), + ('unsubscribe', 'unsubscribed using remark "{value}"'), + ): + if bool (value := record.info.get (key)): + remark.append (display.format (value = value)) + # + # If spam reporting is active and a spam core has reached the limits + with Ignore (KeyError, ValueError): + spam_score = float (record.info['spam-score']) + spam_consequence = record.info['spam-consequence'] + remark.append (f'incoming message has a spam score of {spam_score:.2f} and is {spam_consequence}') + # + # If an autoresponder is in use + with Ignore (KeyError, ValueError): + mailing_id = int (record.info['autoresponder-mailing-id']) + if mailing_id > 0: + failure = record.info.get ('autoresponder-failure') + success = record.info.get 
('autoresponder-success') + if failure or success: + remark.append ('autoresponder mailing {mailing_id} {status}: {message}'.format ( + mailing_id = mailing_id, + status = 'succeeded' if success else 'failed', + message = success if success else failure + )) + # + # If the message had been considered as a bounce + with Ignore (KeyError, ValueError): + bounce_dsn = record.info['bounce-dsn'] + mailing_id = int (record.info['bounce-mailing-id']) + remark.append (f'got bounce with DSN {bounce_dsn} on mailing_id {mailing_id}') + data = { + 'rid': record.rid, + 'timestamp': record.timestamp, + 'status': 1 if record.status else 0, + 'action': record.action if record.action in ('sent', 'filtered', 'ignore', 'unsubscribe', 'bounce') else 'drop', + 'company_id': iget ('company_id'), + 'mailing_id': iget ('mailing_id'), + 'customer_id': iget ('customer_id'), + 'remark': ', '.join (remark) if remark else None + } + db.update ( + db.qselect ( + oracle = ( + f'INSERT INTO {UpdateMailloop.mailloop_log_table} ' + ' (mailloop_log_id, rid, timestamp, status, company_id, mailing_id, customer_id, action, remark) ' + 'VALUES ' + f' ({UpdateMailloop.mailloop_log_table}_seq.nextval, :rid, :timestamp, :status, :company_id, :mailing_id, :customer_id, :action, :remark)' + ), mysql = ( + f'INSERT INTO {UpdateMailloop.mailloop_log_table} ' + ' (rid, timestamp, status, company_id, mailing_id, customer_id, action, remark) ' + 'VALUES ' + ' (:rid, :timestamp, :status, :company_id, :mailing_id, :customer_id, :action, :remark)' + ) + ), + data + ) + except Exception as e: + logger.warning (f'{line}: invalid line: {e}') + return False + else: + return True +#}}} # class Main (Runtime): def supports (self, option: str) -> bool: diff --git a/backend/src/script/tools/EMT_lib/ApplicationStatusMenu.py b/backend/src/script/tools/EMT_lib/ApplicationStatusMenu.py index d1feaa7c1..2eaa22a58 100644 --- a/backend/src/script/tools/EMT_lib/ApplicationStatusMenu.py +++ 
b/backend/src/script/tools/EMT_lib/ApplicationStatusMenu.py @@ -9,6 +9,7 @@ # # #################################################################################################################################################################################################################################################################### import os +import stat import sys import logging import datetime @@ -16,6 +17,8 @@ import getpass import time +from pathlib import Path + from EMT_lib import Colors from EMT_lib import DbConnector from EMT_lib import EMTUtilities @@ -48,6 +51,23 @@ def readApplicationStatus(colorize = True): if not EMTUtilities.sslIsAvailable(): statusText += errorColorCode + "SSL connections are not supported by the current python installation" + defaultColorCode + "\n" + # File permissions + if DbConnector.dbcfgPropertiesFilePath != None: + if os.path.isfile(DbConnector.dbcfgPropertiesFilePath): + groupName = Path(DbConnector.dbcfgPropertiesFilePath).group() + dbcfgStat = os.stat(DbConnector.dbcfgPropertiesFilePath).st_mode + groupReadable = bool(dbcfgStat & stat.S_IRGRP) + if "dbcfg" != groupName and Environment.applicationName == "EMM": + statusText += errorColorCode + "DBCFG: Group of file '" + DbConnector.dbcfgPropertiesFilePath + "' is not 'dbcfg'" + defaultColorCode + "\n" + elif not groupReadable: + statusText += errorColorCode + "DBCFG: Group is missing read permission on file '" + DbConnector.dbcfgPropertiesFilePath + "'" + defaultColorCode + "\n" + else: + statusText += "DBCFG: OK" + "\n" + else: + statusText += errorColorCode + "DBCFG: File '" + DbConnector.dbcfgPropertiesFilePath + "' is missing" + defaultColorCode + "\n" + else: + statusText += errorColorCode + "DBCFG: File is not configured" + defaultColorCode + "\n" + dbcfgEntry = DbConnector.dbcfgProperties[DbConnector.applicationDbcfgEntryName] if DbConnector.applicationDbcfgEntryName in DbConnector.dbcfgProperties else None if not "oracle" in Environment.allowedDbmsSystems and 
not DbConnector.isMysqlDriverModuleAvailable() and not DbConnector.isMariadbDriverModuleAvailable(): statusText += errorColorCode + "The database vendor " + dbcfgEntry["dbms"] + " is not supported by this python installation" + defaultColorCode + "\n" @@ -164,6 +184,23 @@ def readApplicationStatus(colorize = True): logging.exception("Database table emm_db_errorlog_tbl") statusText += errorColorCode + "Database table emm_db_errorlog_tbl: ERROR" + defaultColorCode + "\n" + # Mandatory configuration values + birtUrlLinesCount = DbConnector.selectValue("SELECT COUNT(*) FROM config_tbl WHERE class = 'birt' AND name = 'url' AND value != '[to be defined]'") + if birtUrlLinesCount == 0: + statusText += errorColorCode + "BirtURL configvalue: ERROR ('birt.url' not set)" + defaultColorCode + "\n" + else: + statusText += "BirtURL configvalue: OK" + "\n" + undefinedMailAdresses = DbConnector.select("SELECT name FROM config_tbl WHERE class = 'mailaddress' AND value = '[to be defined]'") + if undefinedMailAdresses != None and len(undefinedMailAdresses) > 0: + undefinedMailAdressesString = "" + for row in undefinedMailAdresses: + if len(undefinedMailAdressesString) > 0: + undefinedMailAdressesString += ", " + undefinedMailAdressesString += row[0] + statusText += errorColorCode + "Mailaddresses: ERROR (" + undefinedMailAdressesString + " not set)" + defaultColorCode + "\n" + else: + statusText += "Mailaddresses: OK" + "\n" + # Jobqueue status try: if DbConnector.emmDbVendor == "oracle": @@ -289,7 +326,7 @@ def readApplicationStatus(colorize = True): processOutput = subprocess.check_output("ps ux | grep -v grep | grep org.apache.catalina | grep '/home/console'", shell=True).decode("UTF-8") elif Environment.isEmmRdirServer: processOutput = subprocess.check_output("ps ux | grep -v grep | grep org.apache.catalina | grep '/home/rdir'", shell=True).decode("UTF-8") - if processOutput is None or processOutput.strip() == "": + if EMTUtilities.isBlank(processOutput): statusText += 
errorColorCode + Environment.applicationName + "Application is NOT running" + defaultColorCode + "\n" else: if len(processOutput) > 0: @@ -313,64 +350,52 @@ def readApplicationStatus(colorize = True): statusText += errorColorCode + "Error while checking for " + Environment.applicationName + "Application running" + defaultColorCode + "\n" # Backend status - if Environment.isOpenEmmServer: - applicationUserName = "openemm" - elif Environment.isEmmRdirServer: - applicationUserName = "rdir" - elif Environment.isEmmMergerServer: - applicationUserName = "merger" - elif Environment.isEmmMailerServer: - applicationUserName = "mailout" - elif Environment.isEmmMailloopServer: - applicationUserName = "mailloop" - else: - applicationUserName = "console" - - if (Environment.isOpenEmmServer or Environment.isEmmMergerServer or Environment.isEmmMailerServer or Environment.isEmmMailloopServer) and os.path.isfile("/home/" + applicationUserName + "/bin/backend.sh"): - try: - # Text output comes via stderr - if EMTUtilities.hasRootPermissions(): - processOutput = subprocess.check_output("su -c \"/home/" + applicationUserName + "/bin/backend.sh status\" " + applicationUserName, stderr=subprocess.STDOUT, shell=True).decode("UTF-8") - else: - processOutput = subprocess.check_output("/home/" + applicationUserName + "/bin/backend.sh status", stderr=subprocess.STDOUT, shell=True).decode("UTF-8") - if processOutput is None or processOutput.strip() == "": - statusText += errorColorCode + Environment.applicationName + "Backend is NOT installed or NOT running" + defaultColorCode + "\n" - else: - if len(processOutput) > 0: - backendStates = {} - for processOutputLine in processOutput.splitlines(): - items = processOutputLine.split(":") - if len(items) > 1: - state = items[1].strip() - if "(" in state: - state = state[0:state.index("(")].strip() - if not state in backendStates: - backendStates[state] = [] - backendStates[state].append(items[0].strip()) - - if len(backendStates) > 0: - for state 
in backendStates: - if state == "running" or state == "ok": - stateColorCode = defaultColorCode - else: - stateColorCode = errorColorCode - statusText += stateColorCode + Environment.applicationName + " Backend " + state + ": " + ", ".join(backendStates[state]) + defaultColorCode + "\n" + if Environment.isOpenEmmServer or Environment.isEmmMergerServer or Environment.isEmmMailerServer or Environment.isEmmMailloopServer: + if os.path.isfile("/home/" + Environment.getBackendApplicationUserName() + "/bin/backend.sh"): + try: + # Text output comes via stderr + if EMTUtilities.hasRootPermissions(): + processOutput = subprocess.check_output("su -c \"/home/" + Environment.getBackendApplicationUserName() + "/bin/backend.sh status\" " + Environment.getBackendApplicationUserName(), stderr=subprocess.STDOUT, shell=True).decode("UTF-8") + else: + processOutput = subprocess.check_output("/home/" + Environment.getBackendApplicationUserName() + "/bin/backend.sh status", stderr=subprocess.STDOUT, shell=True).decode("UTF-8") + if EMTUtilities.isBlank(processOutput): + statusText += errorColorCode + Environment.applicationName + "Backend is NOT installed or NOT running" + defaultColorCode + "\n" + else: + if len(processOutput) > 0: + backendStates = {} + for processOutputLine in processOutput.splitlines(): + items = processOutputLine.split(":") + if len(items) > 1: + state = items[1].strip() + if "(" in state: + state = state[0:state.index("(")].strip() + if not state in backendStates: + backendStates[state] = [] + backendStates[state].append(items[0].strip()) + + if len(backendStates) > 0: + for state in backendStates: + if state == "running" or state == "ok": + stateColorCode = defaultColorCode + else: + stateColorCode = errorColorCode + statusText += stateColorCode + Environment.applicationName + " Backend " + state + ": " + ", ".join(backendStates[state]) + defaultColorCode + "\n" + else: + statusText += errorColorCode + Environment.applicationName + " Backend is NOT running on 
this server" + defaultColorCode + "\n" else: - statusText += errorColorCode + Environment.applicationName + " Backend is NOT running on this server" + defaultColorCode + "\n" + statusText += errorColorCode + Environment.applicationName + " Backend is NOT running" + defaultColorCode + "\n" + except subprocess.CalledProcessError as e: + print(e) + if e.returncode == 1: + statusText += errorColorCode + Environment.applicationName + " Backend is NOT installed or NOT running" + defaultColorCode + "\n" else: - statusText += errorColorCode + Environment.applicationName + " Backend is NOT running" + defaultColorCode + "\n" - except subprocess.CalledProcessError as e: - print(e) - if e.returncode == 1: - statusText += errorColorCode + Environment.applicationName + " Backend is NOT installed or NOT running" + defaultColorCode + "\n" - else: + if EMTUtilities.isDebugMode(): + logging.exception("Error while checking for Backend running") + statusText += errorColorCode + "Error while checking for Backend running" + defaultColorCode + "\n" + except: if EMTUtilities.isDebugMode(): logging.exception("Error while checking for Backend running") statusText += errorColorCode + "Error while checking for Backend running" + defaultColorCode + "\n" - except: - if EMTUtilities.isDebugMode(): - logging.exception("Error while checking for Backend running") - statusText += errorColorCode + "Error while checking for Backend running" + defaultColorCode + "\n" # Ping hostnames from system.cfg if Environment.systemCfgProperties is not None and len(Environment.systemCfgProperties) > 0: @@ -400,41 +425,10 @@ def sendConfigAndLogsAction(actionParameters): if len(password) > 0: print("Creating config and logs data zip file. 
This may take a moment.") - if Environment.isOpenEmmServer: - applicationUserName = "openemm" - applicationUserTempDirectory = "/home/openemm/temp" - if not os.path.isdir(applicationUserTempDirectory): - EMTUtilities.createDirectory(applicationUserTempDirectory, "openemm") - elif Environment.isEmmFrontendServer: - applicationUserName = "console" - applicationUserTempDirectory = "/home/console/temp" - if not os.path.isdir(applicationUserTempDirectory): - EMTUtilities.createDirectory(applicationUserTempDirectory, "console") - elif Environment.isEmmRdirServer: - applicationUserName = "rdir" - applicationUserTempDirectory = "/home/rdir/temp" - if not os.path.isdir(applicationUserTempDirectory): - EMTUtilities.createDirectory(applicationUserTempDirectory, "rdir") - elif Environment.isEmmMergerServer: - applicationUserName = "merger" - applicationUserTempDirectory = "/home/merger/temp" - if not os.path.isdir(applicationUserTempDirectory): - EMTUtilities.createDirectory(applicationUserTempDirectory, "merger") - elif Environment.isEmmMailerServer: - applicationUserName = "mailout" - applicationUserTempDirectory = "/home/mailout/temp" - if not os.path.isdir(applicationUserTempDirectory): - EMTUtilities.createDirectory(applicationUserTempDirectory, "mailout") - elif Environment.isEmmMailloopServer: - applicationUserName = "mailloop" - applicationUserTempDirectory = "/home/mailloop/temp" - if not os.path.isdir(applicationUserTempDirectory): - EMTUtilities.createDirectory(applicationUserTempDirectory, "mailloop") - else: - applicationUserName = "console" - applicationUserTempDirectory = "/home/console/temp" - if not os.path.isdir(applicationUserTempDirectory): - EMTUtilities.createDirectory(applicationUserTempDirectory, "console") + applicationUserName = Environment.getApplicationUserName() + applicationUserTempDirectory = "/home/" + applicationUserName + "/temp" + if not os.path.isdir(applicationUserTempDirectory): + EMTUtilities.createDirectory(applicationUserTempDirectory, 
applicationUserName) configAndLogsZipFilePath = applicationUserTempDirectory + "/configAndLogs.zip" diff --git a/backend/src/script/tools/EMT_lib/BasicWebappMenu.py b/backend/src/script/tools/EMT_lib/BasicWebappMenu.py index d360dfd55..209b7ec4a 100644 --- a/backend/src/script/tools/EMT_lib/BasicWebappMenu.py +++ b/backend/src/script/tools/EMT_lib/BasicWebappMenu.py @@ -40,14 +40,6 @@ def basicWebappMenuAction(actionParameters): if Environment.isOpenEmmServer: applicationUserName = "openemm" - elif Environment.isEmmRdirServer: - applicationUserName = "rdir" - elif Environment.isEmmMergerServer: - applicationUserName = "merger" - elif Environment.isEmmMailerServer: - applicationUserName = "mailout" - elif Environment.isEmmMailloopServer: - applicationUserName = "mailloop" else: applicationUserName = "console" @@ -71,7 +63,7 @@ def basicWebappMenuAction(actionParameters): else: # Using the default from emm.sh currentTomcatNative = Environment.tomcatNativeEmmShDefault - if currentTomcatNative is None or currentTomcatNative == "" or EMTUtilities.getTomcatNativeVersion(currentTomcatNative) is None: + if EMTUtilities.isBlank(currentTomcatNative) or EMTUtilities.getTomcatNativeVersion(currentTomcatNative) is None: currentTomcatNative = None if environmentProperties is not None and len(environmentProperties) > 1: @@ -232,11 +224,13 @@ def configureJavaHome(environmentProperties): else: if Environment.isEmmFrontendServer or Environment.isEmmStatisticsServer or Environment.isEmmWebservicesServer: username = "console" - if Environment.isEmmConsoleRdirServer: + elif Environment.isEmmConsoleRdirServer: if Environment.isEmmRdirServer: username = "rdir" else: username = "console" + else: + username = "console" EMTUtilities.updateEnvironmentPropertiesFile(Environment.environmentConfigurationFilePath, username, environmentProperties) def configureCatalinaHome(environmentProperties): @@ -316,11 +310,13 @@ def configureCatalinaHome(environmentProperties): else: if 
Environment.isEmmFrontendServer or Environment.isEmmStatisticsServer or Environment.isEmmWebservicesServer: username = "console" - if Environment.isEmmConsoleRdirServer: + elif Environment.isEmmConsoleRdirServer: if Environment.isEmmRdirServer: username = "rdir" else: username = "console" + else: + username = "console" EMTUtilities.updateEnvironmentPropertiesFile(Environment.environmentConfigurationFilePath, username, environmentProperties) def configureTomcatnative(applicationUserName): @@ -344,7 +340,7 @@ def configureTomcatnative(applicationUserName): else: # Using the default from emm.sh currentTomcatNative = Environment.tomcatNativeEmmShDefault - if currentTomcatNative is None or currentTomcatNative == "" or EMTUtilities.getTomcatNativeVersion(currentTomcatNative) is None: + if EMTUtilities.isBlank(currentTomcatNative) or EMTUtilities.getTomcatNativeVersion(currentTomcatNative) is None: currentTomcatNative = None # Initial Tomcat-Native installation @@ -420,7 +416,7 @@ def configureTomcatnative(applicationUserName): print(Colors.RED + "Do NOT use command 'yum install tomcat-native', because it installs Apache Tomcat Native for Apache Tomcat 7, which is not compatible with " + Environment.applicationName + "." 
+ Colors.DEFAULT) else: Environment.tomcatNative = tomcatNative - if Environment.tomcatNative is None or Environment.tomcatNative == "" or EMTUtilities.getTomcatNativeVersion(Environment.tomcatNative) is None: + if EMTUtilities.isBlank(Environment.tomcatNative) or EMTUtilities.getTomcatNativeVersion(Environment.tomcatNative) is None: Environment.tomcatNative = None environmentProperties = EMTUtilities.readEnvironmentPropertiesFile(Environment.environmentConfigurationFilePath) environmentProperties["TOMCAT_NATIVE"] = Environment.tomcatNative @@ -429,11 +425,13 @@ def configureTomcatnative(applicationUserName): else: if Environment.isEmmFrontendServer or Environment.isEmmStatisticsServer or Environment.isEmmWebservicesServer: username = "console" - if Environment.isEmmConsoleRdirServer: + elif Environment.isEmmConsoleRdirServer: if Environment.isEmmRdirServer: username = "rdir" else: username = "console" + else: + username = "console" EMTUtilities.updateEnvironmentPropertiesFile(Environment.environmentConfigurationFilePath, username, environmentProperties) def configureWkhtml(environmentProperties): @@ -631,11 +629,13 @@ def configureWkhtml(environmentProperties): else: if Environment.isEmmFrontendServer or Environment.isEmmStatisticsServer or Environment.isEmmWebservicesServer: username = "console" - if Environment.isEmmConsoleRdirServer: + elif Environment.isEmmConsoleRdirServer: if Environment.isEmmRdirServer: username = "rdir" else: username = "console" + else: + username = "console" EMTUtilities.updateEnvironmentPropertiesFile(Environment.environmentConfigurationFilePath, username, environmentProperties) # Store new WKHTML data in database config_tbl if DbConnector.checkDbConnection(): @@ -644,15 +644,17 @@ def configureWkhtml(environmentProperties): def configureProxy(environmentProperties): if Environment.isOpenEmmServer: - username = "openemm" + username = "openemm" else: if Environment.isEmmFrontendServer or Environment.isEmmStatisticsServer or 
Environment.isEmmWebservicesServer: username = "console" - if Environment.isEmmConsoleRdirServer: + elif Environment.isEmmConsoleRdirServer: if Environment.isEmmRdirServer: username = "rdir" else: username = "console" + else: + username = "console" print("Do you want to use a WebProxy? (N/y, Blank => No):") choice = input(" > ").strip().lower() @@ -674,7 +676,7 @@ def configureProxy(environmentProperties): else: environmentProperties.pop("PROXY", None) environmentProperties.pop("NO_PROXY_HOSTS", None) - + print("Do you want to override any system configured proxy? (N/y, Blank => No):") choice = input(" > ").strip().lower() if choice.startswith("y") or choice.startswith("j"): diff --git a/backend/src/script/tools/EMT_lib/CheckMenu.py b/backend/src/script/tools/EMT_lib/CheckMenu.py index 3185d4076..536738113 100644 --- a/backend/src/script/tools/EMT_lib/CheckMenu.py +++ b/backend/src/script/tools/EMT_lib/CheckMenu.py @@ -58,7 +58,7 @@ def executeCheck(): foundError = checkApplicationFileIntegrityWithOutput("Frontend", "/home/openemm/webapps/emm") or foundError foundError = checkApplicationFileIntegrityWithOutput("Statistics", "/home/openemm/webapps/statistics") or foundError foundError = checkApplicationFileIntegrityWithOutput("Webservices", "/home/openemm/webapps/webservices") or foundError - + if foundError: print(Colors.RED + "Errors found" + Colors.DEFAULT) else: @@ -120,9 +120,9 @@ def checkApplicationFileIntegrity(applicationDirectoryPath): if os.path.isfile(applicationDirectoryPath + "/checksums.sha256"): invalidFiles = ssh256FileIntegrityCheck(applicationDirectoryPath) if invalidFiles is None: - return False - else: return True + else: + return False else: return False diff --git a/backend/src/script/tools/EMT_lib/ConfigurationMenu.py b/backend/src/script/tools/EMT_lib/ConfigurationMenu.py index 1702e48df..16f7ae5c2 100644 --- a/backend/src/script/tools/EMT_lib/ConfigurationMenu.py +++ b/backend/src/script/tools/EMT_lib/ConfigurationMenu.py @@ -33,9 +33,13 @@ 
def configTableMenuAction(actionParameters): DbConnector.updateConfigurationValueInDB("system", "support_emergency_url", "", Environment.hostname) DbConnector.updateConfigurationValueInDB("webservices", "url", choice + "/2.0/", Environment.hostname) - birtKeysExist = DbConnector.selectValue("SELECT COUNT(*) FROM config_tbl WHERE class = 'birt' and name = 'privatekey' AND value != '[to be defined]'") > 0 + birtKeysExist = DbConnector.selectValue("SELECT COUNT(*) FROM config_tbl WHERE class = 'birt' and name = 'privatekey' AND value != '[to be defined]' AND (hostname IS NULL OR TRIM(hostname) = '' OR hostname = ?)", Environment.hostname) > 0 + + if Environment.isOpenEmmServer: + applicationUserName = "openemm" + else: + applicationUserName = "console" - applicationUserName = "openemm" if Environment.isOpenEmmServer else "console" print() if not birtKeysExist and not os.path.isfile("/home/" + applicationUserName + "/tomcat/conf/keys/birt_private.pem"): print("Generating Initial Statistics PPK keys") @@ -64,7 +68,7 @@ def configTableMenuAction(actionParameters): print() - currentConfigurationValues = DbConnector.readConfigurationFromDB() + currentConfigurationValues = DbConnector.readConfigurationFromDB(Environment.hostname) configurationValueDescriptions = { "system.url": "Url for " + Environment.applicationName + "-GUI", "system.defaultRdirDomain": "Default rdir domain for new created clients (Add the protocol, e.g. 
https://)", @@ -214,9 +218,9 @@ def systemCfgMenuAction(actionParameters): if Environment.unsavedSystemCfgChanges is not None and key in Environment.unsavedSystemCfgChanges: print(Colors.YELLOW + " " + key + " = " + Environment.unsavedSystemCfgChanges[key] + Colors.DEFAULT) elif key in Environment.readonlyLicenseCfgProperties: - print(Colors.RED + " " + key + " = " + value + Colors.DEFAULT) + print(Colors.RED + " " + key + " = " + str(value) + Colors.DEFAULT) else: - print(" " + key + " = " + value) + print(" " + key + " = " + str(value)) print() diff --git a/backend/src/script/tools/EMT_lib/DbConnector.py b/backend/src/script/tools/EMT_lib/DbConnector.py index 6b746e398..5192ac81f 100644 --- a/backend/src/script/tools/EMT_lib/DbConnector.py +++ b/backend/src/script/tools/EMT_lib/DbConnector.py @@ -133,8 +133,7 @@ def updateDbcfgPropertiesFile(filePath, changedDbcfgProperties): line = "" for key, value in list(oldProperties.items()): - if not (key == "secure" and value == "false"): - line = line + (", " if len(line) > 0 else "") + key + "=" + value + line = line + (", " if len(line) > 0 else "") + key + "=" + value line = dbEntryName + ": " + line dbcfgPropertiesNewData = dbcfgPropertiesNewData + line + "\n" dbcfgPropertiesData = dbcfgPropertiesNewData @@ -144,10 +143,10 @@ def updateDbcfgPropertiesFile(filePath, changedDbcfgProperties): propertiesFileHandle.write(dbcfgPropertiesData) else: raise Exception("Cannot update DbcfgProperties in readonly file '" + filePath + "'") - + dbcfgProperties = readDbcfgPropertiesFile(filePath) if dbcfgProperties[dbEntryName]["dbms"] == "oracle": - if "secure" in dbcfgPropertiesData[dbEntryName] and dbcfgPropertiesData[dbEntryName]["secure"] == "true": + if "secure" in dbcfgProperties[dbEntryName] and dbcfgProperties[dbEntryName]["secure"] == "true": if os.path.isdir("/home/openemm") and (os.getlogin() == "openemm" or EMTUtilities.hasRootPermissions()): setupSecureOracleDbLibrariesForStartup("openemm") if 
os.path.isdir("/home/console") and (os.getlogin() == "console" or EMTUtilities.hasRootPermissions()): @@ -162,10 +161,19 @@ def updateDbcfgPropertiesFile(filePath, changedDbcfgProperties): if os.path.isdir("/home/rdir") and (os.getlogin() == "rdir" or EMTUtilities.hasRootPermissions()): removeSecureOracleDbLibrariesForStartup("rdir") -def parseJdbcConnectionString(jdbcConnectionString): +# Example JDBC ConnectionStrings: +# jdbc:oracle:oci:@XE +# jdbc:oracle:thin:@127.0.0.1:1521:emm +# jdbc:oracle:thin:@test.domain.local:1521:EMM +# jdbc:oracle:thin:@//test.domain.local:1521/special.domain.local +# -- not parsed hereby jdbc:mariadb://mariadb/emm?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8 +# -- not parsed hereby jdbc:mariadb://localhost/emm?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8 +# -- not parsed hereby jdbc:mariadb://127.0.0.1:3306/emm?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8 +def parseOracleJdbcConnectionString(jdbcConnectionString): if jdbcConnectionString is None or len(jdbcConnectionString.strip()) == 0: return None else: + jdbcConnectionString = jdbcConnectionString.replace("@//", "@") jdbcConnectionProperties = {} protocolEndIndex = jdbcConnectionString.find("@") if protocolEndIndex > 0: @@ -178,7 +186,7 @@ def parseJdbcConnectionString(jdbcConnectionString): # TNS names based SID jdbcConnectionProperties["sid"] = dbIdentifierParts[0] elif len(dbIdentifierParts) == 3: - # Format hostname:port:dbname + # Format "hostname:port:dbname" host = dbIdentifierParts[0] while host.startswith("/"): host = host[1:] @@ -470,7 +478,7 @@ def checkDbServiceAvailable(): return False if dbcfgEntry["dbms"] is not None and dbcfgEntry["dbms"].lower() == "oracle": - jdbcConnectProperties = parseJdbcConnectionString(dbcfgEntry["jdbc-connect"]) + jdbcConnectProperties = parseOracleJdbcConnectionString(dbcfgEntry["jdbc-connect"]) if jdbcConnectProperties is None: return False @@ 
-572,6 +580,33 @@ def checkDbExists(databaseName): return False def checkDbStructureExists(): + return checkTableExists("agn_dbversioninfo_tbl"); + +def checkTableExists(tableName): + connection = None + cursor = None + try: + connection = openDbConnection() + if connection is None: + raise Exception("Cannot establish db connection") + cursor = connection.cursor() + + cursor.execute("SELECT COUNT(*) FROM " + tableName + " WHERE 1 = 0") + + for row in cursor: + if row[0] >= 0: + return True + return False + except: + return False + finally: + if cursor is not None: + cursor.close() + if connection is not None: + connection.commit() + connection.close() + +def checkColumnExists(tableName, columnName): connection = None cursor = None try: @@ -580,7 +615,7 @@ def checkDbStructureExists(): raise Exception("Cannot establish db connection") cursor = connection.cursor() - cursor.execute("SELECT COUNT(*) FROM agn_dbversioninfo_tbl") + cursor.execute("SELECT " + columnName + " FROM " + tableName + " WHERE 1 = 0") for row in cursor: if row[0] >= 0: @@ -708,15 +743,25 @@ def createDatabaseAndUser(host, dbname, username, userpassword, dbRootPassword = sqlUpdateReturnCode = os.system(getDbClientPath() + " -u root -h " + host + passwordParameterPart + " --default-character-set=utf8 -e \"FLUSH PRIVILEGES\"") return sqlUpdateReturnCode == 0 -def readConfigurationFromDB(): +def readConfigurationFromDB(hostname): configurationValues = [] - result = select("SELECT class, name, value, hostname FROM config_tbl ORDER BY class, name") + result = select("SELECT class, name, value, hostname FROM config_tbl WHERE hostname IS NULL OR TRIM(hostname) = '' OR hostname = ? 
ORDER BY class, name", hostname) for row in result: className = row[0] configName = row[1] value = row[2] - hostname = row[3] - configurationValues.append({"class": className, "name": configName, "value": value, "hostname": hostname}) + valueHostname = row[3] + + foundValue = False + if valueHostname == hostname: + for configurationValue in configurationValues: + if configurationValue["class"] == className and configurationValue["name"] == configName: + configurationValue["value"] = value + configurationValue["hostname"] = hostname + foundValue = True + break + if not foundValue: + configurationValues.append({"class": className, "name": configName, "value": value, "hostname": valueHostname}) return configurationValues def readConfigurationValueFromDB(configClass, configName, hostname): @@ -745,7 +790,7 @@ def readConfigurationValueFromDB(configClass, configName, hostname): def updateConfigurationValueInDB(configClass, configName, configValue, hostname): # Check if entry already exists. SQL-Update of an entry with same value returns rowcount 0, so a SQL-Select is used. if configValue == "": - update("DELETE FROM config_tbl WHERE class = ? AND name = ? AND (hostname = ? OR hostname IS NULL)", configClass, configName, hostname) + update("DELETE FROM config_tbl WHERE class = ? AND name = ? AND (hostname IS NULL OR TRIM(hostname) = '' OR hostname = ?)", configClass, configName, hostname) else: itemExists = selectValue("SELECT COUNT(*) FROM config_tbl WHERE class = ? AND name = ? 
AND hostname = ?", configClass, configName, hostname) > 0 if itemExists: @@ -771,9 +816,9 @@ def readJobQueueHostsFromDB(): def storeJobQueueHostInDB(hostName, status): if hostName == "*" or hostName is None: - itemExists = selectValue("SELECT COUNT(*) FROM config_tbl WHERE class = 'jobqueue' AND name = 'execute' AND (hostname IS NULL OR hostname = '')") > 0 + itemExists = selectValue("SELECT COUNT(*) FROM config_tbl WHERE class = 'jobqueue' AND name = 'execute' AND (hostname IS NULL OR TRIM(hostname) = '')") > 0 if itemExists: - update("UPDATE config_tbl SET change_date = CURRENT_TIMESTAMP, description = 'Changed by Maintenance Tool', value = ? WHERE class = 'jobqueue' AND name = 'execute' AND (hostname IS NULL OR hostname = '')", 1 if status else 0) + update("UPDATE config_tbl SET change_date = CURRENT_TIMESTAMP, description = 'Changed by Maintenance Tool', value = ? WHERE class = 'jobqueue' AND name = 'execute' AND (hostname IS NULL OR TRIM(hostname) = '')", 1 if status else 0) else: update("INSERT INTO config_tbl (class, name, hostname, value, creation_date, change_date, description) VALUES ('jobqueue', 'execute', NULL, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 'Changed by Maintenance Tool')", 1 if status else 0) else: @@ -785,7 +830,7 @@ def storeJobQueueHostInDB(hostName, status): def removeJobQueueHostFromDB(hostName): if hostName == "*" or hostName is None: - update("DELETE FROM config_tbl WHERE class = 'jobqueue' AND name = 'execute' AND (hostname IS NULL OR hostname = '')") + update("DELETE FROM config_tbl WHERE class = 'jobqueue' AND name = 'execute' AND (hostname IS NULL OR TRIM(hostname) = '')") else: update("DELETE FROM config_tbl WHERE class = 'jobqueue' AND name = 'execute' AND hostname = ?", hostName) @@ -921,7 +966,7 @@ def setupSecureOracleDbLibrariesForStartup(username): if not "# OracleDB secure connection setup: start" in additionalPropertiesData: additionalPropertiesData = additionalPropertiesData\ + "# OracleDB secure connection setup: 
start\n"\ - + "rm -f ~/tomcat/lib/ojdbc8.jar\n"\ + + "rm -f ~/tomcat/lib/ojdbc8*.jar\n"\ + "rm -f ~/tomcat/lib/orai18n.jar\n"\ + "rm -f ~/tomcat/lib/ucp.jar\n"\ + "rm -f ~/tomcat/lib/xstreams.jar\n"\ diff --git a/backend/src/script/tools/EMT_lib/DbManagementMenu.py b/backend/src/script/tools/EMT_lib/DbManagementMenu.py index 27473fb03..3d52bea0e 100644 --- a/backend/src/script/tools/EMT_lib/DbManagementMenu.py +++ b/backend/src/script/tools/EMT_lib/DbManagementMenu.py @@ -39,8 +39,17 @@ def dbcfgMenuAction(actionParameters): print("Database configuration for " + dbEntryName + ":") - if not "secure" in DbConnector.dbcfgProperties[dbEntryName] and DbConnector.dbcfgProperties[dbEntryName]["dbms"] == "oracle": - DbConnector.dbcfgProperties[dbEntryName]["secure"] = "false" + if DbConnector.dbcfgProperties[dbEntryName]["dbms"] == "oracle": + if not "secure" in DbConnector.dbcfgProperties[dbEntryName]: + if Environment.unsavedDbcfgChanges is None: + Environment.unsavedDbcfgChanges = {} + Environment.unsavedDbcfgChanges["dbEntryName"] = dbEntryName + Environment.unsavedDbcfgChanges["secure"] = "false" + if not "tablespaces" in DbConnector.dbcfgProperties[dbEntryName]: + if Environment.unsavedDbcfgChanges is None: + Environment.unsavedDbcfgChanges = {} + Environment.unsavedDbcfgChanges["dbEntryName"] = dbEntryName + Environment.unsavedDbcfgChanges["tablespaces"] = "true" for key, value in sorted(DbConnector.dbcfgProperties[dbEntryName].items()): color = "" @@ -50,6 +59,11 @@ def dbcfgMenuAction(actionParameters): print(Colors.YELLOW + " " + key + " = " + Environment.unsavedDbcfgChanges[key] + Colors.DEFAULT) else: print(" " + key + " = " + value) + if Environment.unsavedDbcfgChanges is not None: + for key, value in sorted(Environment.unsavedDbcfgChanges.items()): + if key != "dbEntryName" and key not in DbConnector.dbcfgProperties[dbEntryName]: + print(Colors.YELLOW + " " + key + " = " + Environment.unsavedDbcfgChanges[key] + Colors.DEFAULT) + print() @@ -69,22 +83,6 @@ def 
dbcfgMenuAction(actionParameters): Environment.errors.append("File is readonly: " + DbConnector.dbcfgPropertiesFilePath) return False - if ((not "jdbc-connect" in DbConnector.dbcfgProperties[dbEntryName]) or DbConnector.dbcfgProperties[dbEntryName]["jdbc-connect"] is None or DbConnector.dbcfgProperties[dbEntryName]["jdbc-connect"] == ""): - if Environment.unsavedDbcfgChanges["dbms"] == "oracle": - Environment.unsavedDbcfgChanges["jdbc-connect"] = "[to be defined]" - elif Environment.unsavedDbcfgChanges["dbms"] == "mariadb": - Environment.unsavedDbcfgChanges["jdbc-connect"] = "jdbc:mariadb://" + DbConnector.dbcfgProperties[dbEntryName]["host"] + "/" + DbConnector.dbcfgProperties[dbEntryName]["name"] + "?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8" - elif Environment.unsavedDbcfgChanges["dbms"] == "mysql": - Environment.unsavedDbcfgChanges["jdbc-connect"] = "jdbc:mysql://" + DbConnector.dbcfgProperties[dbEntryName]["host"] + "/" + DbConnector.dbcfgProperties[dbEntryName]["name"] + "?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8" - - if (not "jdbc-driver" in DbConnector.dbcfgProperties[dbEntryName]) or DbConnector.dbcfgProperties[dbEntryName]["jdbc-driver"] is None or DbConnector.dbcfgProperties[dbEntryName]["jdbc-driver"] == "": - if Environment.unsavedDbcfgChanges["dbms"] == "oracle": - Environment.unsavedDbcfgChanges["jdbc-driver"] = "[to be defined]" - elif Environment.unsavedDbcfgChanges["dbms"] == "mariadb": - Environment.unsavedDbcfgChanges["jdbc-driver"] = "org.mariadb.jdbc.Driver" - elif Environment.unsavedDbcfgChanges["dbms"] == "mysql": - Environment.unsavedDbcfgChanges["jdbc-driver"] = "com.mysql.cj.jdbc.Driver" - try: DbConnector.updateDbcfgPropertiesFile(DbConnector.dbcfgPropertiesFilePath, Environment.unsavedDbcfgChanges) DbConnector.dbcfgProperties = DbConnector.readDbcfgPropertiesFile(DbConnector.dbcfgPropertiesFilePath) @@ -112,7 +110,7 @@ def dbcfgMenuAction(actionParameters): else: 
Environment.messages.append("New database and user credentials created.") - if Environment.getSystemUrl() is None or Environment.getSystemUrl().strip() == "" or Environment.getSystemUrl().strip() == "Unknown" and DbConnector.checkDbServiceAvailable() and DbConnector.checkDbStructureExists(): + if EMTUtilities.isBlank(Environment.getSystemUrl()) or Environment.getSystemUrl().strip() == "Unknown" and DbConnector.checkDbServiceAvailable() and DbConnector.checkDbStructureExists(): Environment.errors.append("Basic configuration is missing. Please configure.") Environment.overrideNextMenu = Environment.configTableMenu @@ -128,6 +126,8 @@ def dbcfgMenuAction(actionParameters): print("Please enter new value for key '" + dbcfgKey + "' (Allowed values are '" + "', '".join(Environment.allowedDbmsSystems) + "'): ") elif dbcfgKey == "secure": print("Please enter new value for key '" + dbcfgKey + "' (Allowed values are 'true' or 'false'): ") + elif dbcfgKey == "tablespaces": + print("Please enter new value for key '" + dbcfgKey + "' (Allowed values are 'true' or 'false'): ") else: print("Please enter new value for key '" + dbcfgKey + "': ") dbcfgValue = input(" > ") @@ -135,8 +135,8 @@ def dbcfgMenuAction(actionParameters): Environment.errors.append("Invalid dbms-type '" + dbcfgValue + "' for key '" + dbcfgKey + "'. Only '" + "', '".join(Environment.allowedDbmsSystems) + "' allowed.") elif dbcfgKey == "secure" and not dbcfgValue in ["true", "false"]: Environment.errors.append("Invalid secure attribute '" + dbcfgValue + "' for key '" + dbcfgKey + "'. Only 'true' or 'false' allowed.") - elif "," in dbcfgValue: - Environment.errors.append("Invalid ',' character in new value '" + dbcfgValue + "' for key '" + dbcfgKey + "'") + elif dbcfgKey == "tablespaces" and not dbcfgValue in ["true", "false"]: + Environment.errors.append("Invalid secure attribute '" + dbcfgValue + "' for key '" + dbcfgKey + "'. 
Only 'true' or 'false' allowed.") else: dbcfgValue = dbcfgValue.strip() DbConnector.dbcfgProperties[dbEntryName][dbcfgKey] = dbcfgValue @@ -145,6 +145,27 @@ def dbcfgMenuAction(actionParameters): Environment.unsavedDbcfgChanges["dbEntryName"] = dbEntryName Environment.unsavedDbcfgChanges[dbcfgKey] = dbcfgValue + if dbcfgKey == "dbms": + # Use default values + if Environment.unsavedDbcfgChanges["dbms"] == "oracle": + Environment.unsavedDbcfgChanges["jdbc-connect"] = "jdbc:oracle:thin:@[to be defined]:1521:emm" + Environment.unsavedDbcfgChanges["jdbc-driver"] = "oracle.jdbc.driver.OracleDriver" + Environment.unsavedDbcfgChanges["sid"] = "[to be defined]" + Environment.unsavedDbcfgChanges["secure"] = "false" + Environment.unsavedDbcfgChanges["tablespaces"] = "true" + elif Environment.unsavedDbcfgChanges["dbms"] == "mariadb": + Environment.unsavedDbcfgChanges["jdbc-connect"] = "jdbc:mariadb://localhost/" + Environment.applicationName.lower() + "?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8" + Environment.unsavedDbcfgChanges["jdbc-driver"] = "org.mariadb.jdbc.Driver" + Environment.unsavedDbcfgChanges["secure"] = "false" + Environment.unsavedDbcfgChanges.pop("sid") + Environment.unsavedDbcfgChanges.pop("tablespaces") + elif Environment.unsavedDbcfgChanges["dbms"] == "mysql": + Environment.unsavedDbcfgChanges["jdbc-connect"] = "jdbc:mysql://localhost/" + Environment.applicationName.lower() + "?zeroDateTimeBehavior=convertToNull&useUnicode=true&characterEncoding=UTF-8" + Environment.unsavedDbcfgChanges["jdbc-driver"] = "com.mysql.cj.jdbc.Driver" + Environment.unsavedDbcfgChanges["secure"] = "false" + Environment.unsavedDbcfgChanges.pop("sid") + Environment.unsavedDbcfgChanges.pop("tablespaces") + return True else: Environment.errors.append("Invalid database entry key name: " + choice) diff --git a/backend/src/script/tools/EMT_lib/EMTUtilities.py b/backend/src/script/tools/EMT_lib/EMTUtilities.py index 00a915fdf..8a4c2bf0e 100644 --- 
a/backend/src/script/tools/EMT_lib/EMTUtilities.py +++ b/backend/src/script/tools/EMT_lib/EMTUtilities.py @@ -16,6 +16,7 @@ import smtplib import subprocess import logging +import glob import urllib.request, urllib.error, urllib.parse from email.mime.application import MIMEApplication @@ -126,6 +127,13 @@ def sslIsAvailable(): except: return False +def chown(filePath, userName = None, groupName = None): + if groupName is not None or (userName is not None and userName != os.environ["USER"]): + if groupName is not None: + os.system("chown -h " + userName + ":" + groupName + " " + filePath) + else: + os.system("chown -h " + userName + " " + filePath) + def createLink(referencedPath, linkPath, userName = None, groupName = None): os.symlink(referencedPath, linkPath) if groupName is not None or (userName is not None and userName != os.environ["USER"]): @@ -180,7 +188,7 @@ def printTextInBox(text, boxChar="*"): print(boxChar * (len(text) + 4)) def checkJavaAvailable(javaHome): - if javaHome is None or javaHome == "" or not os.path.isfile(javaHome + "/bin/java"): + if isBlank(javaHome) or not os.path.isfile(javaHome + "/bin/java"): return False try: @@ -191,7 +199,7 @@ def checkJavaAvailable(javaHome): return False def getJavaVersion(javaHome): - if javaHome is None or javaHome == "" or not os.path.isfile(javaHome + "/bin/java"): + if isBlank(javaHome) or not os.path.isfile(javaHome + "/bin/java"): return None try: @@ -205,7 +213,7 @@ def getJavaVersion(javaHome): return None def getJavaVendor(javaHome): - if javaHome is None or javaHome == "" or not os.path.isfile(javaHome + "/bin/java"): + if isBlank(javaHome) or not os.path.isfile(javaHome + "/bin/java"): return None try: @@ -470,9 +478,9 @@ def zipFile(zipFilePath, filePathToAddToZipFile, password=""): os.system(bashCommand) def unzipFile(zipFilePath, directoryPathToExtractTo): - if zipFilePath is None or zipFilePath.strip() == "": + if isBlank(zipFilePath): raise ValueError("Invalid empty parameter zipFilePath") - 
elif directoryPathToExtractTo is None or directoryPathToExtractTo.strip() == "": + elif isBlank(directoryPathToExtractTo): raise ValueError("Invalid empty parameter directoryPathToExtractTo") elif directoryPathToExtractTo.endswith("/"): directoryPathToExtractTo = directoryPathToExtractTo[:-1] @@ -516,7 +524,7 @@ def getSendmailVersion(): sendmailPath = subprocess.check_output("which sendmail 2>/dev/null", shell=True).decode("UTF-8").strip() except: sendmailPath = None - if sendmailPath is None or sendmailPath == "": + if isBlank(sendmailPath): return None else: try: @@ -524,7 +532,7 @@ def getSendmailVersion(): except: rpmPath = None - if rpmPath is None or rpmPath == "": + if isBlank(rpmPath): logging.exception("Cannot detect SendmailVersion: rpm is missing") return None else: @@ -545,7 +553,7 @@ def getPostfixVersion(): postfixPath = subprocess.check_output("which postconf 2>/dev/null", shell=True).decode("UTF-8").strip() except: postfixPath = None - if postfixPath is None or postfixPath == "": + if isBlank(postfixPath): return None else: postfixVersion = subprocess.check_output("postconf -d mail_version 2>/dev/null", shell=True).decode("UTF-8") @@ -668,8 +676,8 @@ def removeContentFromFile(filePath, startSign, endSign=None): startSignFound = False with open(filePath, "r", encoding="UTF-8") as fileHandle: fileData = fileHandle.read() - startIndex = fileData.index(startSign) - while startIndex > -1: + while startSign in fileData: + startIndex = fileData.index(startSign) startSignFound = True endIndex = -1 if endSign is not None: @@ -678,10 +686,104 @@ def removeContentFromFile(filePath, startSign, endSign=None): fileData = fileData[0:startIndex] + fileData[endIndex + len(endSign)] else: fileData = fileData[0:startIndex] - startIndex = fileData.index(startSign) if startSignFound: with open(filePath, "w", encoding="UTF-8") as fileHandle: fileHandle.write(fileData) return startSignFound else: raise Exception("No such file: " + filePath) + +def 
getSingleFileByPattern(filePathPattern): + filesList = glob.glob(filePathPattern) + if filesList != None and len(filesList) == 1: + return filesList[0] + else: + return None + +def manageTlsCertificateForTomcat(defaultTlsCertificateDirectory, filePathServerXml, applicationName): + with open(filePathServerXml, "r") as file: + serverXmlContent = file.read() + + activateTls = True + if "TLS configuration start" in serverXmlContent and "TLS configuration end" in serverXmlContent: + print("Do you want to activate a TLS certificate (https) for " + applicationName + "? (N/y, Blank => Cancel):") + answer = input(" > ").lower().strip() + if answer.startswith("y") or answer.startswith("j"): + activateTls = True + else: + activateTls = False + if activateTls: + defaultCertificateFilePath = getSingleFileByPattern(defaultTlsCertificateDirectory + "/*.crt") + while True: + if defaultCertificateFilePath == None: + print("Please enter path to TLS certificate file (.crt) (Blank => Cancel):") + else: + print("Please enter path to TLS certificate file (.crt) (Blank => '" + defaultCertificateFilePath + "', 'cancel' => Cancel):") + certificateFilePath = input(" > ").strip() + if isBlank(certificateFilePath): + if defaultCertificateFilePath != None: + certificateFilePath = defaultCertificateFilePath + else: + return + elif "cancel" == certificateFilePath.lower(): + return + if not os.path.isfile(certificateFilePath): + print("File does not exist: " + certificateFilePath) + print() + else: + break + + defaultKeyFilePath = getSingleFileByPattern(defaultTlsCertificateDirectory + "/*.key") + while True: + if defaultKeyFilePath == None: + print("Please enter path to TLS key file (.key) (Blank => Cancel):") + else: + print("Please enter path to TLS key file (.key) (Blank => '" + defaultKeyFilePath + "', 'cancel' => Cancel):") + keyFilePath = input(" > ").strip() + if isBlank(keyFilePath): + if defaultKeyFilePath != None: + keyFilePath = defaultKeyFilePath + else: + return + elif "cancel" == 
keyFilePath.lower(): + return + if not os.path.isfile(keyFilePath): + print("File does not exist: " + keyFilePath) + print() + else: + break + + defaultChainFilePath = getSingleFileByPattern(defaultTlsCertificateDirectory + "/*.ca-bundle") + while True: + if defaultChainFilePath == None: + print("Please enter path to TLS certificate CA chain file (.ca-bundle) (Blank => Cancel):") + else: + print("Please enter path to TLS certificate CA chain file (.ca-bundle) (Blank => '" + defaultChainFilePath + "', 'cancel' => Cancel):") + chainFilePath = input(" > ").strip() + if isBlank(chainFilePath): + if defaultChainFilePath != None: + chainFilePath = defaultChainFilePath + else: + return + elif "cancel" == chainFilePath.lower(): + return + if not os.path.isfile(chainFilePath): + print("File does not exist: " + chainFilePath) + print() + else: + break + + serverXmlContent = re.sub(r".*", serverXmlContent) + serverXmlContent = re.sub(r".*TLS configuration end -->.*", "\t\t", serverXmlContent) + + serverXmlContent = re.sub(r"certificateChainFile=\".*\"", 'certificateChainFile="' + chainFilePath + '"', serverXmlContent) + serverXmlContent = re.sub(r"certificateFile=\".*\"", 'certificateFile="' + certificateFilePath + '"', serverXmlContent) + serverXmlContent = re.sub(r"certificateKeyFile=\".*\"", 'certificateKeyFile="' + keyFilePath + '"', serverXmlContent) + else: + serverXmlContent = re.sub(r".*.*", "\t\tTLS configuration end -->", serverXmlContent) + + with open(filePathServerXml, "w", encoding="UTF-8") as file: + file.write(serverXmlContent) + print("Successfully configured TLS certificate") + print() diff --git a/backend/src/script/tools/EMT_lib/Environment.py b/backend/src/script/tools/EMT_lib/Environment.py index 0689b2f33..12841e094 100644 --- a/backend/src/script/tools/EMT_lib/Environment.py +++ b/backend/src/script/tools/EMT_lib/Environment.py @@ -24,10 +24,11 @@ class Environment: # OpenEMM and EMM specific settings start here # 
################################################ - toolVersion = "22.10.024" + toolVersion = "23.04.032" toolName = None applicationName = None + scriptFilePath = None applicationUserNamesToCheck = None applicationDbcfgEntryDefaultName = None @@ -72,7 +73,6 @@ class Environment: agnitasCloudUrlReachable = False hostname = os.uname()[1] - scriptFilePath = os.path.dirname(os.path.realpath(__file__)) systemUrl = None username = os.environ["USER"] frontendUserName = None @@ -91,7 +91,7 @@ class Environment: errors = [] unsavedDbcfgChanges = None - readonlyDbcfgProperties = ["host"] + readonlyDbcfgProperties = [] unsavedSystemCfgChanges = None readonlyLicenseCfgProperties = ["licence"] @@ -695,7 +695,7 @@ def readSystemValues(): tomcatNative = "" tomcatNative = "/opt/agnitas.com/software/tomcat-native" environmentConfigurationFileData += "export TOMCAT_NATIVE=" + tomcatNative + "\n" - + environmentConfigurationFileData += "source $HOME/scripts/config.sh\n" environmentConfigurationFileHandle.write(environmentConfigurationFileData) @@ -922,7 +922,7 @@ def readSystemValues(): os.remove("/home/" + Environment.username + "/bin/EmmMaintenanceTool.sh") if os.path.islink("/home/" + Environment.username + "/bin/EmmMaintenanceTool.sh"): os.remove("/home/" + Environment.username + "/bin/EmmMaintenanceTool.sh") - + Environment.checkMigration() @staticmethod @@ -949,6 +949,8 @@ def readApplicationValues(): EMTUtilities.createLink("/home/openemm/tomcat/conf", "/home/openemm/conf", "openemm") if not os.path.isfile("/home/openemm/tomcat/conf/server.xml") and os.path.isfile("/home/openemm/tomcat/conf/server.xml.template"): shutil.copy("/home/openemm/tomcat/conf/server.xml.template", "/home/openemm/tomcat/conf/server.xml") + EMTUtilities.chown("/home/openemm/tomcat/conf/server.xml", "openemm", "openemm") + EMTUtilities.manageTlsCertificateForTomcat("/home/openemm/etc/ssl", "/home/openemm/tomcat/conf/server.xml", Environment.applicationName) if not os.path.isdir("/home/openemm/logs") and 
not os.path.islink("/home/openemm/logs"): EMTUtilities.createLink("/home/openemm/tomcat/logs", "/home/openemm/logs", "openemm") if os.path.isdir("/home/openemm/release/backend/current"): @@ -1012,6 +1014,8 @@ def readApplicationValues(): EMTUtilities.createLink("/home/console/tomcat/conf", "/home/console/conf", "console") if not os.path.isfile("/home/console/tomcat/conf/server.xml") and os.path.isfile("/home/console/tomcat/conf/server.xml.template"): shutil.copy("/home/console/tomcat/conf/server.xml.template", "/home/console/tomcat/conf/server.xml") + EMTUtilities.chown("/home/openemm/tomcat/conf/server.xml", "console", "console") + EMTUtilities.manageTlsCertificateForTomcat("/home/console/sslcert", "/home/console/tomcat/conf/server.xml", Environment.applicationName) if not os.path.isdir("/home/console/logs") and not os.path.islink("/home/console/logs"): EMTUtilities.createLink("/home/console/tomcat/logs", "/home/console/logs", "console") @@ -1104,6 +1108,8 @@ def readApplicationValues(): EMTUtilities.createLink("/home/rdir/tomcat/conf", "/home/rdir/conf", "rdir") if not os.path.isfile("/home/rdir/tomcat/conf/server.xml") and os.path.isfile("/home/rdir/tomcat/conf/server.xml.template"): shutil.copy("/home/rdir/tomcat/conf/server.xml.template", "/home/rdir/tomcat/conf/server.xml") + EMTUtilities.chown("/home/openemm/tomcat/conf/server.xml", "rdir", "rdir") + EMTUtilities.manageTlsCertificateForTomcat("/home/rdir/sslcert", "/home/rdir/tomcat/conf/server.xml", Environment.applicationName) if not os.path.isdir("/home/rdir/logs") and not os.path.islink("/home/rdir/logs"): EMTUtilities.createLink("/home/rdir/tomcat/logs", "/home/rdir/logs", "rdir") if os.path.isdir("/home/rdir/release/backend/current"): @@ -1170,8 +1176,8 @@ def readApplicationValues(): @staticmethod def getSystemUrl(): try: - if (Environment.systemUrl is None or Environment.systemUrl == "Unknown") and DbConnector.checkDbServiceAvailable(): - Environment.systemUrl = DbConnector.selectValue("SELECT value 
FROM config_tbl WHERE class = 'system' AND name = 'url' AND (hostname IS NULL OR TRIM(hostname) = '')") + if (Environment.systemUrl is None or Environment.systemUrl == "Unknown") and DbConnector.checkDbConnection(): + Environment.systemUrl = DbConnector.selectValue("SELECT value FROM config_tbl WHERE class = 'system' AND name = 'url' AND (hostname IS NULL OR TRIM(hostname) = '' OR hostname = ?)", Environment.hostname) if "[to be defined]" in Environment.systemUrl or Environment.systemUrl.strip() == "": Environment.systemUrl = None @@ -1185,8 +1191,9 @@ def getSystemUrl(): @staticmethod def checkMigration(): - if Environment.applicationName == "EMM": - if DbConnector.checkDbServiceAvailable(): + try: + # To be removed in near future: GWUA-5514 + if DbConnector.checkDbConnection() and DbConnector.checkColumnExists("workflow_tbl", "is_legacy_mode"): problematicInActiveWorkflows = DbConnector.select("SELECT shortname, company_id, workflow_id FROM workflow_tbl WHERE status != 2 AND workflow_id IN (SELECT workflow_id FROM workflow_reaction_tbl WHERE is_legacy_mode = 1)") if len(problematicInActiveWorkflows) > 0: listOfWorkflowNames = "" @@ -1197,7 +1204,7 @@ def checkMigration(): if len(listOfWorkflowNames) > 0: listOfWorkflowNames = listOfWorkflowNames + ", " listOfWorkflowNames = listOfWorkflowNames + "\"" + shortname + "\" (clientID: " + str(companyID) + ", workflowID: " + str(workflowID) + ")" - Environment.warnings.append("Your EMM database contains inactive legacy workflows " + listOfWorkflowNames + ". Please delete those workflows since they will no longer work with EMM 23.10 and later.") + Environment.warnings.append("Your " + Environment.applicationName + " database contains inactive legacy workflows " + listOfWorkflowNames + ". 
Please delete those workflows since they will no longer work with " + Environment.applicationName + " 23.10 and later.") problematicActiveWorkflows = DbConnector.select("SELECT shortname, company_id, workflow_id FROM workflow_tbl WHERE status = 2 AND workflow_id IN (SELECT workflow_id FROM workflow_reaction_tbl WHERE is_legacy_mode = 1)") if len(problematicActiveWorkflows) > 0: @@ -1209,8 +1216,8 @@ def checkMigration(): if len(listOfWorkflowNames) > 0: listOfWorkflowNames = listOfWorkflowNames + ", " listOfWorkflowNames = listOfWorkflowNames + "\"" + shortname + "\" (clientID: " + str(companyID) + ", workflowID: " + str(workflowID) + ")" - Environment.warnings.append("Your EMM database contains active legacy workflows " + listOfWorkflowNames + " which can not be migrated. Please rebuild these workflows from scratch and delete the legacy workflows since they will no longer work with EMM 23.10 and later. If you need help, please contact support.") - + Environment.warnings.append("Your " + Environment.applicationName + " database contains active legacy workflows " + listOfWorkflowNames + " which can not be migrated. Please rebuild these workflows from scratch and delete the legacy workflows since they will no longer work with " + Environment.applicationName + " 23.10 and later. 
If you need help, please contact support.") + problematicWorkflowsWithReportIcon = DbConnector.select("SELECT shortname, company_id, workflow_id FROM workflow_tbl WHERE LOWER(workflow_schema) LIKE '%type\":\"report%'") if problematicWorkflowsWithReportIcon is not None and len(problematicWorkflowsWithReportIcon) > 0: listOfWorkflowNames = "" @@ -1221,4 +1228,28 @@ def checkMigration(): if len(listOfWorkflowNames) > 0: listOfWorkflowNames = listOfWorkflowNames + ", " listOfWorkflowNames = listOfWorkflowNames + "\"" + shortname + "\" (clientID: " + str(companyID) + ", workflowID: " + str(workflowID) + ")" - Environment.warnings.append("Your EMM database contains campaign workflows " + listOfWorkflowNames + " which contain a report icon. Please remove the report icon from these workflows since they will no longer work with EMM 23.10 and later. If you need help, please contact support.") + Environment.warnings.append("Your " + Environment.applicationName + " database contains campaign workflows " + listOfWorkflowNames + " which contain a report icon. Please remove the report icon from these workflows since they will no longer work with " + Environment.applicationName + " 23.10 and later. If you need help, please contact support.") + + oldFormDoLinks = DbConnector.selectValue("SELECT COUNT(*) FROM rdir_url_tbl WHERE full_url like '%/form.do%'") + if oldFormDoLinks is not None and oldFormDoLinks > 0: + Environment.warnings.append("Your " + Environment.applicationName + " database contains userform links in mailings of the old '.../form.do?...' format. Please switch those to the new format '.../form.action?...'. 
If you need help, please contact support.") + except Exception as e: + if EMTUtilities.isDebugMode(): + logging.exception("Cannot check migration") + if hasattr(e, 'message'): + Environment.errors.append("Cannot check migration: " + str(e.message)) + else: + Environment.errors.append("Cannot check migration") + + @staticmethod + def getBackendApplicationUserName(): + if Environment.isOpenEmmServer: + return "openemm" + elif Environment.isEmmMergerServer: + return "merger" + elif Environment.isEmmMailerServer: + return "mailout" + elif Environment.isEmmMailloopServer: + return "mailloop" + else: + return "merger" diff --git a/backend/src/script/tools/EMT_lib/InstallAndUpdateMenu.py b/backend/src/script/tools/EMT_lib/InstallAndUpdateMenu.py index 150e7d8b7..6258e2584 100644 --- a/backend/src/script/tools/EMT_lib/InstallAndUpdateMenu.py +++ b/backend/src/script/tools/EMT_lib/InstallAndUpdateMenu.py @@ -19,12 +19,14 @@ import getpass import shutil import logging +import tempfile import urllib.request, urllib.error, urllib.parse from EMT_lib.Environment import Environment from EMT_lib import Colors from EMT_lib import DbConnector from EMT_lib import EMTUtilities +from EMT_lib import License from EMT_lib import SupplementalMenu @@ -485,16 +487,15 @@ def installFile(packageFilePath, logfile, interactive): if Environment.environmentConfigurationFilePath != None and os.path.isfile(Environment.environmentConfigurationFilePath): environmentProperties = EMTUtilities.readEnvironmentPropertiesFile(Environment.environmentConfigurationFilePath) environmentProperties["TOMCAT_NATIVE"] = Environment.tomcatNative - if Environment.isOpenEmmServer: - username = "openemm" - else: - if Environment.isEmmFrontendServer or Environment.isEmmStatisticsServer or Environment.isEmmWebservicesServer: + if Environment.isEmmFrontendServer or Environment.isEmmStatisticsServer or Environment.isEmmWebservicesServer: + username = "console" + elif Environment.isEmmConsoleRdirServer: + if 
Environment.isEmmRdirServer: + username = "rdir" + else: username = "console" - if Environment.isEmmConsoleRdirServer: - if Environment.isEmmRdirServer: - username = "rdir" - else: - username = "console" + else: + username = "console" EMTUtilities.updateEnvironmentPropertiesFile(Environment.environmentConfigurationFilePath, username, environmentProperties) if os.path.islink(Environment.tomcatNative): @@ -592,11 +593,11 @@ def installFile(packageFilePath, logfile, interactive): answer = input(" > ").lower().strip() if answer == "" or answer.startswith("y") or answer.startswith("j"): print(Colors.YELLOW + "Restarting ..." + Colors.DEFAULT) - os.execl(sys.executable, sys.executable, *sys.argv) + os.execl(sys.executable, Environment.scriptFilePath, *sys.argv) sys.exit(0) else: print(Colors.YELLOW + "Restart for Runtime Update..." + Colors.DEFAULT) - os.execl(sys.executable, sys.executable, *sys.argv) + os.execl(sys.executable, Environment.scriptFilePath, *sys.argv) sys.exit(0) elif applicationName == "backend-startup": newBackendVersion = EMTUtilities.getVersionFromFilename(packageFilename) @@ -645,11 +646,11 @@ def installFile(packageFilePath, logfile, interactive): answer = input(" > ").lower().strip() if answer == "" or answer.startswith("y") or answer.startswith("j"): print(Colors.YELLOW + "Restarting ..." + Colors.DEFAULT) - os.execl(sys.executable, sys.executable, *sys.argv) + os.execl(sys.executable, Environment.scriptFilePath, *sys.argv) sys.exit(0) else: print(Colors.YELLOW + "Restart for Update..." 
+ Colors.DEFAULT) - os.execl(sys.executable, sys.executable, *sys.argv) + os.execl(sys.executable, Environment.scriptFilePath, *sys.argv) sys.exit(0) elif applicationName == "backend-merger" or applicationName == "backend-mailer" or applicationName == "backend-mailloop": newBackendVersion = EMTUtilities.getVersionFromFilename(packageFilename) @@ -1459,7 +1460,7 @@ def installFileFromCloud(packageUrl, interactive, alreadyOpenLogFile = None): logfile.write(errorText + "\n") return - if releaseSubdirectoryName is None or releaseSubdirectoryName == "": + if EMTUtilities.isBlank(releaseSubdirectoryName): downloadDestinationFilePath = "/tmp/" + packageFilename else: downloadDestinationFilePath = "/home/" + applicationUserName + "/release/" + releaseSubdirectoryName + "/" + packageFilename @@ -1636,6 +1637,7 @@ def installFileFromWebsite(interactive, alreadyOpenLogFile = None): logfile.write("Release version upgrade: " + versionInfo["new-release-backend-startup"][0] + " " + versionInfo["new-release-backend-startup"][1] + "\n") downloadFileUrlPaths["new-release-backend-startup"] = versionInfo["new-release-backend-startup"][1] print(Colors.YELLOW + "\nStarting release upgrade" + Colors.DEFAULT) + downloadAndInstallNewLicense() elif "new-release-runtime" in versionInfo and interactive: print(Colors.YELLOW + "\nA new release version is available: " + versionInfo["new-release-runtime"][0] + Colors.DEFAULT) print("Do you want to upgrade? 
(N/y, Blank => Cancel):") @@ -1645,6 +1647,7 @@ def installFileFromWebsite(interactive, alreadyOpenLogFile = None): logfile.write("Release version upgrade: " + versionInfo["new-release-runtime"][0] + " " + versionInfo["new-release-runtime"][1] + "\n") downloadFileUrlPaths["new-release-runtime"] = versionInfo["new-release-runtime"][1] print(Colors.YELLOW + "\nStarting release upgrade" + Colors.DEFAULT) + downloadAndInstallNewLicense() if len(downloadFileUrlPaths) == 0: # Application updates @@ -1948,7 +1951,7 @@ def installFileFromWebsite(interactive, alreadyOpenLogFile = None): logfile.write(errorText + "\n") return - if releaseSubdirectoryName is None or releaseSubdirectoryName == "": + if EMTUtilities.isBlank(releaseSubdirectoryName): downloadDestinationFilePath = "/tmp/" + packageFilename else: if not os.path.isdir("/home/" + applicationUserName + "/release"): @@ -2021,7 +2024,7 @@ def installFileFromWebsite(interactive, alreadyOpenLogFile = None): else: Environment.messages.append("Update package file '" + packageFilename + "' deployed with errors.\nFor logs see '" + os.path.realpath(logfile.name) + "'") - if Environment.getSystemUrl() is None or Environment.getSystemUrl().strip() == "" or Environment.getSystemUrl().strip() == "Unknown" and DbConnector.checkDbServiceAvailable() and DbConnector.checkDbStructureExists(): + if EMTUtilities.isBlank(Environment.getSystemUrl()) or Environment.getSystemUrl().strip() == "Unknown" and DbConnector.checkDbServiceAvailable() and DbConnector.checkDbStructureExists(): Environment.errors.append("Basic configuration is missing. 
Please configure.") Environment.overrideNextMenu = Environment.configTableMenu except urllib.error.URLError as e: @@ -2342,7 +2345,7 @@ def executeUnattendedUpdate(): logfile = open(logfilePath, "w", encoding="UTF-8") logfile.write("Update started at: " + now.strftime("%Y-%m-%d_%H:%M:%S") + "\n") - if updateFile is None or updateFile.strip() == "": + if EMTUtilities.isBlank(updateFile): logfile.write("Update from website\n") installFileFromWebsite(False, logfile) elif not os.path.isfile(updateFile): @@ -2416,7 +2419,7 @@ def cleanupOldVersions(releaseDirectoryPath, updateVersion, currentVersion, logf maximumVersion = getMaximumVersionForMajorMinorVersion(availableMajorMinorVersions[index], availableVersionsToDelete) if maximumVersion is not None: availableVersionsToDelete.remove(maximumVersion) - + for index in range(0, updateVersionHotfixVersionsToKeep): maximumMajorMinorMicroVersion = getMaximumVersionForMajorMinorMicroVersion(updateVersion, availableVersionsToDelete) if maximumMajorMinorMicroVersion is not None: @@ -2471,3 +2474,66 @@ def getMaximumVersionForMajorMinorMicroVersion(majorMinorMicroVersion, versionLi if maximumMajorMinorVersion is None or EMTUtilities.compareVersions(version, maximumMajorMinorVersion) < 0: maximumMajorMinorVersion = version return maximumMajorMinorVersion + +def downloadAndInstallNewLicense(): + licenseProperties = License.License.readLicenseValues() + if "licenseUrl" in licenseProperties and EMTUtilities.isNotBlank(licenseProperties.get("licenseUrl")): + licenseUrl = licenseProperties.get("licenseUrl") + + if not EMTUtilities.checkDownloadFileIsAvailable(licenseUrl): + print("LicenseUrl: " + licenseUrl + " (not reachable)") + return + else: + if EMTUtilities.debugMode: + print("LicenseUrl: " + licenseUrl) + + downloadDestinationFilePath = "/tmp/emm_license.zip" + if os.path.isfile(downloadDestinationFilePath): + os.remove(downloadDestinationFilePath) + + downloadPageResponse = None + try: + downloadPageResponse = 
EMTUtilities.openUrlConnection(licenseUrl) + with open(downloadDestinationFilePath, "wb") as downloadDestinationFile: + chunk_size = 8192 + while True: + chunk = downloadPageResponse.read(chunk_size) + + if not chunk: + break + + downloadDestinationFile.write(chunk) + except: + errorText = "Download of new license file failed" + if EMTUtilities.debugMode: + logging.exception(errorText + "\nUrl: " + str(licenseUrl)) + raise + else: + raise Exception(errorText) + finally: + if downloadPageResponse is not None: + downloadPageResponse.close() + + if os.path.isfile(downloadDestinationFilePath): + with tempfile.TemporaryDirectory() as licenseTempDirectory: + if downloadDestinationFilePath.endswith(".zip"): + EMTUtilities.unzipFile(downloadDestinationFilePath, licenseTempDirectory) + elif downloadDestinationFilePath.endswith(".tar.gz"): + EMTUtilities.unTarGzFile(downloadDestinationFilePath, licenseTempDirectory) + + if not os.path.isfile(licenseTempDirectory + "/emm.license.xml") or not os.path.isfile(licenseTempDirectory + "/emm.license.xml.sig"): + Environment.errors.append("Given license file archive does not contain expected files (emm.license.xml, emm.license.xml.sig)") + else: + with open(licenseTempDirectory + "/emm.license.xml", "r", encoding="UTF-8") as licenseDataFileHandle: + licenseData = licenseDataFileHandle.read() + with open(licenseTempDirectory + "/emm.license.xml.sig", "rb") as licenseSignatureFileHandle: + licenseSignature = licenseSignatureFileHandle.read() + + License.updateLicense(licenseData, licenseSignature) + + DbConnector.update("INSERT INTO server_command_tbl(command, server_name, execution_date, admin_id, description, timestamp) VALUES ('RELOAD_LICENSE_DATA', 'ALL', CURRENT_TIMESTAMP, 1, 'New license data uploaded by Maintenance Tool', CURRENT_TIMESTAMP)") + + Environment.messages.append("Successfully installed new license file") + + if os.path.isfile(downloadDestinationFilePath): + os.remove(downloadDestinationFilePath) diff --git 
a/backend/src/script/tools/EMT_lib/License.py b/backend/src/script/tools/EMT_lib/License.py index d5260b07e..5e6a6a0d6 100644 --- a/backend/src/script/tools/EMT_lib/License.py +++ b/backend/src/script/tools/EMT_lib/License.py @@ -23,7 +23,7 @@ class License: @staticmethod def readLicenseValues(): licenseXmlDocument = None - if DbConnector.checkDbServiceAvailable(): + if DbConnector.checkDbConnection(): data = DbConnector.selectValue("SELECT data FROM license_tbl WHERE name = 'LicenseData'") if data is not None: # Read license information from db @@ -82,12 +82,12 @@ def getLicenseValue(licenseValueName, companyID=0): returnValue = License.licenseProperties[companyID][licenseValueName] elif companyID != 0 and 0 in License.licenseProperties and licenseValueName in License.licenseProperties[0] and EMTUtilities.isNotBlank(License.licenseProperties[0][licenseValueName]): returnValue = License.licenseProperties[0][licenseValueName] - + if returnValue is not None and "unlimited" == returnValue: returnValue = "-1" - + return returnValue - + @staticmethod def getLicenseIntegerValue(licenseValueName, companyID=0): licenseValueString = License.getLicenseValue(licenseValueName, companyID) @@ -112,7 +112,7 @@ def checkLicenseStatus(): Environment.Environment.errors.append("LicenseID is not set in database") elif License.getLicenseValue("licenseID") != dbLicenseId: Environment.Environment.errors.append("LicenseID of database and licensedata do not match") - + if License.getLicenseValue("expirationDate") is not None: today = date.today() expirationDate = datetime.strptime(License.getLicenseValue("expirationDate"), "%d.%m.%Y") diff --git a/backend/src/script/tools/EMT_lib/Menu.py b/backend/src/script/tools/EMT_lib/Menu.py index a7cbd114c..e1513b79f 100644 --- a/backend/src/script/tools/EMT_lib/Menu.py +++ b/backend/src/script/tools/EMT_lib/Menu.py @@ -9,6 +9,7 @@ # # 
#################################################################################################################################################################################################################################################################### import sys +import os from EMT_lib.Environment import Environment from EMT_lib import Colors @@ -74,6 +75,20 @@ def show(self, startMenu = None): print("Update Channel: " + Environment.updateChannel) print("Hostname: " + Environment.hostname) + harddriveProperties = os.statvfs("/tmp") + hddSize = harddriveProperties.f_frsize * harddriveProperties.f_blocks + hddFreeSize = harddriveProperties.f_frsize * harddriveProperties.f_bfree + hddFreePercentage = (hddFreeSize / hddSize) * 100 + if hddFreePercentage < 10: + color = Colors.RED + elif hddFreePercentage < 20: + color = Colors.YELLOW + else: + color = Colors.GREEN + + hddFreePercentageString = color + "{:.1f} %".format(hddFreePercentage) + Colors.DEFAULT + print("Free diskspace: " + hddFreePercentageString + " (of " + "{:.2f}".format(hddSize / 1024 / 1024 / 1024) + " GiB)") + if Environment.isOpenEmmServer: if License.getLicenseName() is not None: print("OpenEMM License: " + License.getLicenseName() + " (ID: " + License.getLicenseID() + ")") diff --git a/backend/src/script/tools/EMT_lib/SupplementalMenu.py b/backend/src/script/tools/EMT_lib/SupplementalMenu.py index 4a5cee967..4747a8996 100644 --- a/backend/src/script/tools/EMT_lib/SupplementalMenu.py +++ b/backend/src/script/tools/EMT_lib/SupplementalMenu.py @@ -101,7 +101,7 @@ def licenseFileMenuAction(actionParameters): EMTUtilities.unTarGzFile(licenseFilePath, licenseTempDirectory) if not os.path.isfile(licenseTempDirectory + "/emm.license.xml") or not os.path.isfile(licenseTempDirectory + "/emm.license.xml.sig"): - Environment.errors.append("Given licensefile archive does not contain expected files (emm.license.xml, emm.license.xml.sig)") + Environment.errors.append("Given license file archive does not 
contain expected files (emm.license.xml, emm.license.xml.sig)") else: with open(licenseTempDirectory + "/emm.license.xml", "r", encoding="UTF-8") as licenseDataFileHandle: licenseData = licenseDataFileHandle.read() @@ -119,7 +119,7 @@ def licenseFileMenuAction(actionParameters): os.mkdir("/home/console/tomcat/conf") if os.path.isdir("/home/console/tomcat/conf") and not os.path.isdir("/home/console/tomcat/conf/keys"): os.mkdir("/home/console/tomcat/conf/keys") - shutil.copy(licenseTempDirectory + "/emm.salt", "/home/console/tomcat/conf/keys/server.xml") + shutil.copy(licenseTempDirectory + "/emm.salt", "/home/console/tomcat/conf/keys/emm.salt") Environment.messages.append("Successfully installed emm.salt file") def restartMenuAction(actionParameters): diff --git a/backend/src/script/tools/EMT_lib/TlsCertificateMenu.py b/backend/src/script/tools/EMT_lib/TlsCertificateMenu.py new file mode 100644 index 000000000..58dbdd666 --- /dev/null +++ b/backend/src/script/tools/EMT_lib/TlsCertificateMenu.py @@ -0,0 +1,31 @@ +#################################################################################################################################################################################################################################################################### +# # +# # +# Copyright (C) 2022 AGNITAS AG (https://www.agnitas.org) # +# # +# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. # +# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. # +# You should have received a copy of the GNU Affero General Public License along with this program. If not, see . 
# +# # +#################################################################################################################################################################################################################################################################### +import os +import re + +from EMT_lib import EMTUtilities + +from EMT_lib.Environment import Environment + +def executeTlsCertificateMenuAction(actionParameters): + if Environment.frontendUserName == "openemm": + EMTUtilities.manageTlsCertificateForTomcat("/home/openemm/etc/ssl", "/home/openemm/tomcat/conf/server.xml", Environment.applicationName) + elif Environment.frontendUserName == "console": + EMTUtilities.manageTlsCertificateForTomcat("/home/console/sslcert", "/home/console/tomcat/conf/server.xml", Environment.applicationName) + elif Environment.frontendUserName == "rdir": + EMTUtilities.manageTlsCertificateForTomcat("/home/rdir/sslcert", "/home/rdir/tomcat/conf/server.xml", Environment.applicationName) + else: + print("Not a frontend server. 
'conf/server.xml' is not available") + + print() + print("Press any key to continue") + choice = input(" > ") + return diff --git a/backend/src/script/tools/OMT.py b/backend/src/script/tools/OMT.py index e51b2826a..a555771a1 100644 --- a/backend/src/script/tools/OMT.py +++ b/backend/src/script/tools/OMT.py @@ -28,12 +28,15 @@ from EMT_lib import VersionSwitchMenu from EMT_lib import ConfigurationMenu from EMT_lib import SupplementalMenu +from EMT_lib import TlsCertificateMenu def configureTool(): Environment.toolName = "OpenEMM Maintenance Tool (OMT)" # OpenEMM at GitHub: https://github.com/agnitas-org/OpenEMM Environment.applicationName = "OpenEMM" + Environment.scriptFilePath = os.path.dirname(os.path.realpath(__file__)) + if os.path.isfile("/home/" + os.getlogin() + "/.OMT/OMT.override.properties"): customProperty = EMTUtilities.readPropertiesFile("/home/" + os.getlogin() + "/.OMT/OMT.override.properties") Environment.applicationUserNamesToCheck = customProperty["Environment.applicationUserNamesToCheck"].split(",") @@ -42,10 +45,12 @@ def configureTool(): Environment.applicationDbcfgEntryDefaultName = "openemm" + Environment.readonlyDbcfgProperties = ["host"] + Environment.agnitasDownloadSiteUrl = "https://www.agnitas.de/download" - Environment.agnitasDownloadPathVersionInfo = Environment.agnitasDownloadSiteUrl + "/openemm-version-22.10/" + Environment.agnitasDownloadPathVersionInfo = Environment.agnitasDownloadSiteUrl + "/openemm-version-23.04/" Environment.updateChannel = None - Environment.agnitasUpdateChannels = {"TEST": Environment.agnitasDownloadSiteUrl + "/openemm-version-22.10_TEST/"} + Environment.agnitasUpdateChannels = {"TEST": Environment.agnitasDownloadSiteUrl + "/openemm-version-23.04_TEST/"} Environment.defaultLandingPage = None Environment.multiServerSystem = False @@ -94,19 +99,10 @@ def main(): Environment.init() if "-update" in sys.argv: - errorsOccurred = InstallAndUpdateMenu.executeUnattendedUpdate() + 
InstallAndUpdateMenu.executeUnattendedUpdate() elif "-check" in sys.argv: foundError = CheckMenu.executeCheck() sys.exit(1 if foundError else 0) - else: - foundApplicationUserName = False - for applicationUserNameToCheck in Environment.applicationUserNamesToCheck: - if Environment.username == applicationUserNameToCheck: - foundApplicationUserName = True - break - if not foundApplicationUserName and not EMTUtilities.hasRootPermissions() and not EMTUtilities.isDebugMode(): - print(Colors.RED + "\nYou must start this program as one of the allowed users '" + "', '".join(Environment.applicationUserNamesToCheck) + "' or with root permissions (sudo)!" + Colors.DEFAULT + "\n") - sys.exit(1) menu = Menu("Main") @@ -119,6 +115,7 @@ def main(): configurationMenu.addSubMenu(Environment.configTableMenu) if Environment.systemCfgFilePath is not None and os.path.isfile(Environment.systemCfgFilePath): configurationMenu.addSubMenu(Menu("Change system.cfg").setAction(ConfigurationMenu.systemCfgMenuAction)) + configurationMenu.addSubMenu(Menu("Change layout images", lambda: DbConnector.checkDbStructureExists()).setAction(LayoutMenu.layoutImagesTableMenuAction)) configurationMenu.addSubMenu(Menu("Change client/account configuration", lambda: DbConnector.checkDbStructureExists()).setAction(ConfigurationMenu.clientMenuAction)) configurationMenu.addSubMenu(Menu("Change jobqueue configuration", lambda: DbConnector.checkDbStructureExists()).setAction(ConfigurationMenu.jobqueueMenuAction)) if Environment.defaultLandingPage is not None: @@ -134,6 +131,8 @@ def main(): securityMenu.addSubMenu(Menu("Create new initial 'emm-master' password", lambda: DbConnector.checkDbStructureExists()).setAction(SupplementalMenu.masterPasswordMenuAction)) if Environment.isEmmFrontendServer: securityMenu.addSubMenu(Menu("Install license file", lambda: DbConnector.checkDbStructureExists()).setAction(SupplementalMenu.licenseFileMenuAction)) + if Environment.isOpenEmmServer: + 
securityMenu.addSubMenu(Menu("Configure TLS certificate (https)", None).setAction(TlsCertificateMenu.executeTlsCertificateMenuAction)) menu.addSubMenu(Menu("Install or update package from AGNITAS Website", lambda: Environment.agnitasDownloadSiteUrlReachable and Environment.agnitasDownloadPathVersionInfo is not None).setAction(InstallAndUpdateMenu.siteUpdateMenuAction)) menu.addSubMenu(Menu("Install or update package from local file").setAction(InstallAndUpdateMenu.fileUpdateMenuAction)) @@ -144,22 +143,22 @@ def main(): menu.addSubMenu(Menu("Send configuration and log files in email", lambda: DbConnector.checkDbStructureExists()).setAction(ApplicationStatusMenu.sendConfigAndLogsAction)) try: - if Environment.javaHome is None or Environment.javaHome == "" or not EMTUtilities.checkJavaAvailable(Environment.javaHome): + if EMTUtilities.isBlank(Environment.javaHome) or not EMTUtilities.checkJavaAvailable(Environment.javaHome): Environment.errors.append("Basic webapplication configuration for JAVA is missing or invalid. Please configure.") Environment.overrideNextMenu = basicWebappMenu - elif Environment.catalinaHome is None or Environment.catalinaHome == "" or not EMTUtilities.checkTomcatAvailable(Environment.javaHome, Environment.catalinaHome): + elif EMTUtilities.isBlank(Environment.catalinaHome) or not EMTUtilities.checkTomcatAvailable(Environment.javaHome, Environment.catalinaHome): Environment.errors.append("Basic webapplication configuration for Tomcat/CatalinaHome is missing or invalid. Please configure.") Environment.overrideNextMenu = basicWebappMenu - elif not Environment.isEmmRdirServer and (Environment.wkhtmltopdf is None or Environment.wkhtmltopdf == "" or not os.path.isfile(Environment.wkhtmltopdf)): + elif not Environment.isEmmRdirServer and (EMTUtilities.isBlank(Environment.wkhtmltopdf) or not os.path.isfile(Environment.wkhtmltopdf)): Environment.errors.append("Basic webapplication configuration for WKHTML (wkhtmltopdf) is missing or invalid. 
Please configure.") Environment.overrideNextMenu = basicWebappMenu - elif not Environment.isEmmRdirServer and (Environment.wkhtmltoimage is None or Environment.wkhtmltoimage == "" or not os.path.isfile(Environment.wkhtmltoimage)): + elif not Environment.isEmmRdirServer and (EMTUtilities.isBlank(Environment.wkhtmltoimage) or not os.path.isfile(Environment.wkhtmltoimage)): Environment.errors.append("Basic webapplication configuration for WKHTML (wkhtmltoimage) is missing or invalid. Please configure.") Environment.overrideNextMenu = basicWebappMenu elif (DbConnector.emmDbVendor is None or not DbConnector.checkDbServiceAvailable()) and Environment.username != "mailout": Environment.errors.append("Database is not running or host is invalid. Please configure.") Environment.overrideNextMenu = dbcfgMenu - elif Environment.getSystemUrl() is None or Environment.getSystemUrl().strip() == "" or Environment.getSystemUrl().strip() == "Unknown" and DbConnector.checkDbServiceAvailable() and DbConnector.checkDbStructureExists(): + elif EMTUtilities.isBlank(Environment.getSystemUrl()) or Environment.getSystemUrl().strip() == "Unknown" and DbConnector.checkDbServiceAvailable() and DbConnector.checkDbStructureExists(): Environment.errors.append("Basic configuration is missing. Please configure.") Environment.overrideNextMenu = Environment.configTableMenu menu.show() diff --git a/backend/src/script/tools/OMT.sh b/backend/src/script/tools/OMT.sh index 78b580bfc..e657d5a36 100644 --- a/backend/src/script/tools/OMT.sh +++ b/backend/src/script/tools/OMT.sh @@ -42,7 +42,7 @@ if [ ! $? 
-eq 0 ]; then { if [ "${osVendor}" == "Suse" ]; then { echo "Commands to install python runtime:" echo "zypper install -y python python-xml python3-pip python-gdbm" - echo "pip3 install pydns xlrd xlwt xlutils paramiko pyspf ipaddr dnspython pydkim pycrypto requests httpie setproctitle inotify" + echo "pip3 install pydns paramiko pyspf ipaddr dnspython pydkim pycrypto requests httpie setproctitle inotify aiohttp aiohttp-xmlrpc msgpack websockets asyncinotify asyncssh" exit 1 } else { echo "Command to install python runtime: 'sudo yum -y install python'" @@ -156,4 +156,6 @@ if [ ! $? -eq 0 ]; then { } fi echo "Starting python ..." +python_home="$(dirname "$(dirname "${PYTHON}")")" +export PYTHONHOME=${python_home} ${PYTHON} "${scriptDir}/../scripts/OMT.py" $@ diff --git a/backend/src/script/tools/dkim-mgr3.py b/backend/src/script/tools/dkim-mgr3.py index e2aa11e1a..59c9d81c4 100644 --- a/backend/src/script/tools/dkim-mgr3.py +++ b/backend/src/script/tools/dkim-mgr3.py @@ -24,7 +24,7 @@ from agn3.tools import silent_call # class DKIM (CLI): - __slots__ = ['company_id', 'verbose', 'newkey', 'display_key', 'validate_only', 'dns_options', 'parameter'] + __slots__ = ['company_id', 'verbose', 'newkey', 'display_key', 'validate_only', 'skip_validation', 'dns_options', 'parameter'] def add_arguments (self, parser: argparse.ArgumentParser) -> None: parser.add_argument ( '-c', '--company-id', action = 'store', type = int, dest = 'company_id', @@ -46,6 +46,10 @@ def add_arguments (self, parser: argparse.ArgumentParser) -> None: '-V', '--validate-only', action = 'store_true', dest = 'validate_only', help = 'validate only' ) + parser.add_argument ( + '-S', '--skip-validation', action = 'store_true', dest = 'skip_validation', + help = 'do not validate DNS entry at all' + ) parser.add_argument ( '--dns-tcp', action = 'store_true', dest = 'dns_tcp', help = 'use TCP for DNS operations instead of UDP' @@ -64,6 +68,9 @@ def use_arguments (self, args: argparse.Namespace) -> None: 
self.newkey = args.newkey self.display_key = args.display_key self.validate_only = args.validate_only + self.skip_validation = args.skip_validation + if self.validate_only and self.skip_validation: + raise error ('either just valid or skip validation, not both, supported') self.dns_options: Dict[str, Any] = {} if args.dns_tcp: self.dns_options['protocol'] = 'TCP' @@ -106,7 +113,7 @@ def executor (self) -> bool: valid_end = datetime.fromordinal (valid_end.toordinal () + 1) with open (key_file) as fd: key = fd.read () - if self.validate (domain, selector, key) and not self.validate_only: + if (self.skip_validation or self.validate (domain, selector, key)) and not self.validate_only: self.insert (domain, selector, key, valid_start, valid_end) else: self.show ()