I'm using a C library from C#.
This is the prototype of the function I want to use:
unsigned char* heatmap_render_default_to(const heatmap_t* h, unsigned char* colorbuf)
If colorbuf is NULL, this function allocates the memory for it itself, like this:
colorbuf = (unsigned char*)malloc(h->w*h->h * 4);
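If I read that correctly, the caller can also provide the buffer itself, and it has to be h->w * h->h * 4 bytes (4 bytes per pixel, RGBA). My rough idea of what that allocation would look like on the managed side is something like this (the width and height values are just placeholders, since I'm not sure yet how to read them back from the heatmap):
int width = 256;   // placeholder for h->w
int height = 256;  // placeholder for h->h
byte[] colorbuf = new byte[width * height * 4];  // 4 bytes (RGBA) per pixel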
What I actually tried first, though, was to create unmanaged memory like this:
string image = "";
//allocate from COM heap
Marshal.StringToCoTaskMemAnsi(image);
GCHandle gch = GCHandle.Alloc(image, GCHandleType.Pinned);
HeatMap.HeatMapWrapper.NativeMethods.Render_default_to(hmPtr, image);
But I'm getting this exception:
Exception thrown at 0x0F17263A (EasyDLL.dll) in Test.exe: 0xC0000005: Access violation writing location 0x01050000.
If there is a handler for this exception, the program may be safely continued.
This is the first time I've tried to integrate an unmanaged library into C#.
Could someone help me with this?
P/Invoke declarations:
[DllImport(DLL, EntryPoint = "heatmap_render_default_to", CallingConvention = CallingConvention.Cdecl)]
public static extern string Render_default_to(IntPtr h, byte[] colorbuf);
[DllImport(DLL, EntryPoint = "heatmap_render_to", CallingConvention = CallingConvention.Cdecl)]
public static extern string Render_to(IntPtr h, IntPtr colorscheme, byte[] colorbuf);
[DllImport(DLL, EntryPoint = " heatmap_render_saturated_to", CallingConvention = CallingConvention.Cdecl)]
public static extern string Render_saturated_to(IntPtr h, IntPtr colorscheme, float saturation, byte[] colorbuf);
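Since the native functions actually return an unsigned char* (the C code is below), I'm also wondering whether string is even the right return type in these declarations. My guess, and it is only a guess, is that the returned pointer would map to IntPtr, i.e. declaring the first one like this instead:
// Guess only: map the native unsigned char* return value to IntPtr instead of string.
[DllImport(DLL, EntryPoint = "heatmap_render_default_to", CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr Render_default_to(IntPtr h, byte[] colorbuf);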
This is the C code:
__declspec(dllexport) unsigned char* __cdecl heatmap_render_default_to(const heatmap_t* h, unsigned char* colorbuf)
{
    return heatmap_render_to(h, heatmap_cs_default, colorbuf);
}

__declspec(dllexport) unsigned char* heatmap_render_to(const heatmap_t* h, const heatmap_colorscheme_t* colorscheme, unsigned char* colorbuf)
{
    return heatmap_render_saturated_to(h, colorscheme, h->max > 0.0f ? h->max : 1.0f, colorbuf);
}

__declspec(dllexport) unsigned char* __cdecl heatmap_render_saturated_to(const heatmap_t* h, const heatmap_colorscheme_t* colorscheme, float saturation, unsigned char* colorbuf)
{
    unsigned y;
    assert(saturation > 0.0f);

    /* For convenience, if no buffer is given, malloc a new one. */
    if (!colorbuf) {
        colorbuf = (unsigned char*)malloc(h->w*h->h * 4);
        if (!colorbuf) {
            return 0;
        }
    }

    /* TODO: could actually even flatten this loop before parallelizing it. */
    /* I.e., to go i = 0 ; i < h*w since I don't have any padding! (yet?) */
    for (y = 0; y < h->h; ++y) {
        float* bufline = h->buf + y*h->w;
        unsigned char* colorline = colorbuf + 4 * y*h->w;
        unsigned x;
        for (x = 0; x < h->w; ++x, ++bufline) {
            /* Saturate the heat value to the given saturation, and then
             * normalize by that.
             */
            const float val = (*bufline > saturation ? saturation : *bufline) / saturation;
            /* We add 0.5 in order to do real rounding, not just dropping the
             * decimal part. That way we are certain the highest value in the
             * colorscheme is actually used.
             */
            const size_t idx = (size_t)((float)(colorscheme->ncolors - 1)*val + 0.5f);
            /* This is probably caused by a negative entry in the stamp! */
            assert(val >= 0.0f);
            /* This should never happen. It is likely a bug in this library. */
            assert(idx < colorscheme->ncolors);
            /* Just copy over the color from the colorscheme. */
            memcpy(colorline, colorscheme->colors + idx * 4, 4);
            colorline += 4;
        }
    }

    return colorbuf;
}
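In case it matters, my understanding of how the result would be consumed, assuming the IntPtr-returning declaration sketched above and letting the native side do the allocation by passing null for colorbuf, is roughly this (width and height are again placeholders):
IntPtr nativeBuf = HeatMap.HeatMapWrapper.NativeMethods.Render_default_to(hmPtr, null);
if (nativeBuf != IntPtr.Zero)
{
    byte[] pixels = new byte[width * height * 4];
    Marshal.Copy(nativeBuf, pixels, 0, pixels.Length);
    // The buffer was malloc'd inside the DLL, so I assume it can only be freed by a
    // matching free() exported from that same DLL, not from the C# side.
}
But I'm not sure this is right either, which is why I'm asking.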
See Question&Answers more detail:
os