[sr-dev] [kamailio/kamailio] NATS Module not working (Issue #3253)

iliesh notifications at github.com
Thu Sep 29 11:17:51 CEST 2022


Hi @linuxmaniac, @eschmidbauer - unfortunately, the patch didn't work either; Kamailio is still crashing with the same error:

```
INFO: nats [nats_mod.c:437]: nats_init_connection(): adding server [nats://192.168.1.1:4222] [1]
ERROR: nats [nats_mod.c:456]: nats_init_connection(): could not create nats options [System Error]
ERROR: nats [nats_mod.c:230]: init_worker(): failed to init nat connections
ERROR: nats [nats_mod.c:346]: mod_child_init(): failed to init struct for worker [0]
```
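
For context, the `could not create nats options [System Error]` line is logged by `nats_init_connection()` right after the server entry above is added. Below is a minimal standalone sketch of that step, assuming the module uses the standard cnats client API (`natsOptions_Create()` / `natsOptions_SetServers()`); this is not the kamailio module code itself, only an illustration of where a non-OK status such as `NATS_SYS_ERROR` ("System Error") would surface:

```c
/*
 * Standalone sketch of the option-creation step (NOT nats_mod.c),
 * using the public cnats API from <nats/nats.h>.
 */
#include <stdio.h>
#include <nats/nats.h>

int main(void)
{
    natsOptions *opts = NULL;
    const char *servers[] = { "nats://192.168.1.1:4222" };
    natsStatus s;

    /* the step that reports "System Error" in the log above */
    s = natsOptions_Create(&opts);
    if (s == NATS_OK)
        s = natsOptions_SetServers(opts, servers, 1);

    if (s != NATS_OK) {
        fprintf(stderr, "could not create nats options [%s]\n",
                natsStatus_GetText(s));
        if (opts != NULL)
            natsOptions_Destroy(opts);
        return 1;
    }

    natsOptions_Destroy(opts);
    return 0;
}
```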

```
#0  __pthread_kill_implementation (threadid=<optimized out>, signo=signo@entry=6, no_tid=no_tid@entry=0) at pthread_kill.c:44
#1  0x00007fa7a25445b3 in __pthread_kill_internal (signo=6, threadid=<optimized out>) at pthread_kill.c:78
#2  0x00007fa7a24f7ce6 in __GI_raise (sig=sig@entry=6) at ../sysdeps/posix/raise.c:26
#3  0x00007fa7a24cb7f3 in __GI_abort () at abort.c:79
#4  0x00000000006fbc43 in qm_debug_check_frag (qm=0x7fa79cc5c000, f=0x7fa79cd3de40, file=0x7fa79cc33688 "nats: nats_mod.c", line=593, efile=0x882e79 "core/mem/q_malloc.c", eline=511)
    at core/mem/q_malloc.c:129
#5  0x00000000006ffe6b in qm_free (qmp=0x7fa79cc5c000, p=0x7fa79cd3de78, file=0x7fa79cc33688 "nats: nats_mod.c", func=0x7fa79cc34360 <__func__.13> "nats_destroy_workers", line=593,
    mname=0x7fa79cc33000 "nats") at core/mem/q_malloc.c:511
#6  0x000000000070a971 in qm_shm_free (qmp=0x7fa79cc5c000, p=0x7fa79cd3de78, file=0x7fa79cc33688 "nats: nats_mod.c", func=0x7fa79cc34360 <__func__.13> "nats_destroy_workers", line=593,
    mname=0x7fa79cc33000 "nats") at core/mem/q_malloc.c:1350
#7  0x00007fa79cc2dd89 in nats_destroy_workers () at nats_mod.c:593
#8  0x00007fa79cc2ddce in mod_destroy () at nats_mod.c:604
#9  0x0000000000636b22 in destroy_modules () at core/sr_module.c:842
#10 0x000000000041f059 in cleanup (show_status=0) at main.c:561
#11 0x0000000000420ac1 in shutdown_children (sig=15, show_status=0) at main.c:704
#12 0x0000000000439a81 in main (argc=5, argv=0x7ffe957de5a8) at main.c:3093
```

```
#0  __pthread_kill_implementation (threadid=<optimized out>, signo=signo@entry=6, no_tid=no_tid@entry=0) at pthread_kill.c:44
        tid = <optimized out>
        ret = 0
        pd = <optimized out>
        old_mask = {__val = {16949132684318090892, 1844065090856224981, 0, 0, 0, 0, 4410266781315238445, 10, 0, 0, 0, 0, 0, 0, 0, 0}}
        ret = <optimized out>
#1  0x00007fa7a25445b3 in __pthread_kill_internal (signo=6, threadid=<optimized out>) at pthread_kill.c:78
No locals.
#2  0x00007fa7a24f7ce6 in __GI_raise (sig=sig@entry=6) at ../sysdeps/posix/raise.c:26
        ret = <optimized out>
#3  0x00007fa7a24cb7f3 in __GI_abort () at abort.c:79
        save_stage = 1
        act = {__sigaction_handler = {sa_handler = 0x0, sa_sigaction = 0x0}, sa_mask = {__val = {0, 0, 0, 1880844493789993498, 1880844493789993498, 1880844493789993498, 1880844493789993498, 0,
              0, 0, 0, 2314885530818453536, 2314885530818453536, 2314885530818453536, 2314885530818453536, 0}}, sa_flags = 0, sa_restorer = 0x0}
        sigs = {__val = {32, 0, 0, 0, 13816831175882180370, 13816973010994691967, 13816973010643234476, 13816973012072644543, 0, 0, 0, 0, 4702111234474983745, 4702111234474983745,
            4702111234474983745, 4702111234474983745}}
#4  0x00000000006fbc43 in qm_debug_check_frag (qm=0x7fa79cc5c000, f=0x7fa79cd3de40, file=0x7fa79cc33688 "nats: nats_mod.c", line=593, efile=0x882e79 "core/mem/q_malloc.c", eline=511)
    at core/mem/q_malloc.c:129
        p = 0x501000001ff
        __func__ = "qm_debug_check_frag"
#5  0x00000000006ffe6b in qm_free (qmp=0x7fa79cc5c000, p=0x7fa79cd3de78, file=0x7fa79cc33688 "nats: nats_mod.c", func=0x7fa79cc34360 <__func__.13> "nats_destroy_workers", line=593,
    mname=0x7fa79cc33000 "nats") at core/mem/q_malloc.c:511
        qm = 0x7fa79cc5c000
        f = 0x7fa79cd3de40
        size = 140731406474448
        next = 0x1
        prev = 0x7fa79cc956c8
        __func__ = "qm_free"
#6  0x000000000070a971 in qm_shm_free (qmp=0x7fa79cc5c000, p=0x7fa79cd3de78, file=0x7fa79cc33688 "nats: nats_mod.c", func=0x7fa79cc34360 <__func__.13> "nats_destroy_workers", line=593,
    mname=0x7fa79cc33000 "nats") at core/mem/q_malloc.c:1350
No locals.
#7  0x00007fa79cc2dd89 in nats_destroy_workers () at nats_mod.c:593
        i = 1
        worker = 0x7fa79cd3dc18
        pub_worker = 0x7fa79cd3de78
        __func__ = "nats_destroy_workers"
#8  0x00007fa79cc2ddce in mod_destroy () at nats_mod.c:604
        __func__ = "mod_destroy"
#9  0x0000000000636b22 in destroy_modules () at core/sr_module.c:842
        t = 0x7fa7a1d426a8
        foo = 0x7fa7a1d421d0
        __func__ = "destroy_modules"
#10 0x000000000041f059 in cleanup (show_status=0) at main.c:561
        memlog = 0
        __func__ = "cleanup"
#11 0x0000000000420ac1 in shutdown_children (sig=15, show_status=0) at main.c:704
        __func__ = "shutdown_children"
#12 0x0000000000439a81 in main (argc=5, argv=0x7ffe957de5a8) at main.c:3093
        cfg_stream = 0x2473380
        c = -1
        r = 0
        tmp = 0xc5 <error: Cannot access memory at address 0xc5>
        tmp_len = 0
        port = 0
        proto = 0
        ahost = 0x0
        aport = 0
        options = 0x8152f8 ":f:cm:M:dVIhEeb:l:L:n:vKrRDTN:W:w:t:u:g:P:G:SQ:O:a:A:x:X:Y:"
        ret = -1
        seed = 532749240
        rfd = 4
        debug_save = 0
        debug_flag = 0
        dont_fork_cnt = 0
        n_lst = 0x300000000
        p = 0x40 <error: Cannot access memory at address 0x40>
        st = {st_dev = 25, st_ino = 1308, st_nlink = 2, st_mode = 16832, st_uid = 1001, st_gid = 1002, __pad0 = 0, st_rdev = 0, st_size = 40, st_blksize = 4096, st_blocks = 0, st_atim = {
            tv_sec = 1664442191, tv_nsec = 175708131}, st_mtim = {tv_sec = 1664442396, tv_nsec = 174674806}, st_ctim = {tv_sec = 1664442396, tv_nsec = 174674806}, __glibc_reserved = {0, 0, 0}}
        tbuf = "\342\374}\242\247\177\000\000\070\273z\242\247\177\000\000 \342}\225\376\177\000\000\a\000\000\000\000\000\000\000بz\242\247\177\000\000\a\000\000\000#\000\000\000\060\n~\242\247\177\000\000\225\326{\242\247\177\000\000\000\000\000\000\000\000\000\000\200\351{\242\247\177\000\000\070\060K\242\247\177\000\000Ѝz\242\247\177\000\000\000\000\000\000\000\000\000\000\360\341}\225\376\177\000\000\360\376}\242\247\177\000\000غz\242\247\177\000\000غz\242\247\177\000\000\000\000\000\000\000\000\000\000\230\234z\242\247\177\000\000\360\341}\225\376\177\000\000\220\025~\242\247\177\000\000\000\240z\242\247\177\000\000\022\254z\242\247\177\000\000\066\241{\242\247\177\000\000\344SJ\242\247\177\000\000\272\070}\242"...
        option_index = 0
        long_options = {{name = 0x817966 "help", has_arg = 0, flag = 0x0, val = 104}, {name = 0x812514 "version", has_arg = 0, flag = 0x0, val = 118}, {name = 0x81796b "alias", has_arg = 1,
            flag = 0x0, val = 1024}, {name = 0x817971 "subst", has_arg = 1, flag = 0x0, val = 1025}, {name = 0x817977 "substdef", has_arg = 1, flag = 0x0, val = 1026}, {
            name = 0x817980 "substdefs", has_arg = 1, flag = 0x0, val = 1027}, {name = 0x81798a "server-id", has_arg = 1, flag = 0x0, val = 1028}, {name = 0x817994 "loadmodule", has_arg = 1,
            flag = 0x0, val = 1029}, {name = 0x81799f "modparam", has_arg = 1, flag = 0x0, val = 1030}, {name = 0x8179a8 "log-engine", has_arg = 1, flag = 0x0, val = 1031}, {
            name = 0x8179b3 "debug", has_arg = 1, flag = 0x0, val = 1032}, {name = 0x8179b9 "cfg-print", has_arg = 0, flag = 0x0, val = 1033}, {name = 0x8179c3 "atexit", has_arg = 1, flag = 0x0,
            val = 1034}, {name = 0x0, has_arg = 0, flag = 0x0, val = 0}}
        __func__ = "main"
```
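
The full trace shows the abort actually happens on shutdown: `nats_destroy_workers()` (nats_mod.c:593) calls `shm_free()` on `pub_worker` and q_malloc's debug check (`qm_debug_check_frag`) rejects the fragment, which would fit a worker whose setup never completed because `init_worker()` failed earlier. As a generic illustration only (plain `malloc`/`free` instead of kamailio's shm allocator, and all names such as `worker_t`, `workers_init()`, `workers_destroy()` are made up for this sketch), this is the kind of guarded cleanup that avoids freeing partially initialized worker state, or freeing it twice:

```c
/*
 * Generic, self-contained sketch of guarded worker cleanup.
 * Not the nats module code; hypothetical names throughout.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

typedef struct worker {
    char *subject;              /* owned buffer; NULL if init never got this far */
} worker_t;

static worker_t *workers = NULL;
static int nr_workers = 0;

static int workers_init(int n)
{
    workers = calloc((size_t)n, sizeof(*workers)); /* zero-filled: subject == NULL */
    if (workers == NULL)
        return -1;
    nr_workers = n;
    for (int i = 0; i < n; i++) {
        workers[i].subject = strdup("nats.demo");
        if (workers[i].subject == NULL)
            return -1;          /* partial init: remaining entries stay NULL */
    }
    return 0;
}

static void workers_destroy(void)
{
    if (workers == NULL)
        return;
    for (int i = 0; i < nr_workers; i++) {
        free(workers[i].subject);   /* free(NULL) is a no-op, so partial init is safe */
        workers[i].subject = NULL;  /* avoid a double free if destroy runs twice */
    }
    free(workers);
    workers = NULL;
}

int main(void)
{
    if (workers_init(2) != 0)
        fprintf(stderr, "init failed, cleaning up partially initialized workers\n");
    workers_destroy();
    workers_destroy();          /* second call is harmless thanks to the NULL guards */
    return 0;
}
```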

-- 
Reply to this email directly or view it on GitHub:
https://github.com/kamailio/kamailio/issues/3253#issuecomment-1262003574