@@ -1,4 +1,4 @@
-//
+//
 // Copyright (c) .NET Foundation and Contributors
 // Portions Copyright (c) Microsoft Corporation. All rights reserved.
 // See LICENSE file in the project root for full license information.
@@ -12,11 +12,11 @@ void CLR_RT_HeapCluster::HeapCluster_Initialize(CLR_UINT32 size, CLR_UINT32 bloc
     NATIVE_PROFILE_CLR_CORE();
     GenericNode_Initialize();
 
-    size = (size - sizeof(*this)) / sizeof(struct CLR_RT_HeapBlock);
+    size = (size - sizeof(*this)) / sizeof(CLR_RT_HeapBlock);
 
-    m_freeList.DblLinkedList_Initialize(); // CLR_RT_DblLinkedList m_freeList;
-    m_payloadStart = (CLR_RT_HeapBlock_Node *)&this[1]; // CLR_RT_HeapBlock_Node* m_payloadStart;
-    m_payloadEnd = &m_payloadStart[size]; // CLR_RT_HeapBlock_Node* m_payloadEnd;
+    m_freeList.DblLinkedList_Initialize();
+    m_payloadStart = static_cast<CLR_RT_HeapBlock_Node *>(&this[1]);
+    m_payloadEnd = &m_payloadStart[size];
 
     // Scan memory looking for possible objects to salvage
     CLR_RT_HeapBlock_Node *ptr = m_payloadStart;
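For reference, the initialization in the hunk above carves the payload out of the raw cluster memory: the cluster header (sizeof(*this)) is subtracted from the total size, the remainder is divided by the heap-block size, and m_payloadStart (&this[1], the first byte past the header) and m_payloadEnd then bracket exactly that many blocks. Below is a minimal, self-contained sketch of the same arithmetic using hypothetical ClusterHeader/Block types, not the real nanoCLR structures:

// Sketch only: hypothetical ClusterHeader/Block types, not the nanoCLR structures.
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <new>

struct Block
{
    uint32_t raw[3]; // stand-in for one fixed-size heap block
};

struct ClusterHeader
{
    Block *payloadStart;
    Block *payloadEnd;

    void Initialize(uint32_t totalSize)
    {
        // Same arithmetic as the hunk above: strip the header, then count whole blocks.
        uint32_t blocks = (totalSize - sizeof(*this)) / sizeof(Block);
        payloadStart = reinterpret_cast<Block *>(&this[1]); // first byte past the header
        payloadEnd = &payloadStart[blocks];                 // one past the last usable block
    }
};

int main()
{
    constexpr uint32_t totalSize = 1024;
    void *raw = std::malloc(totalSize);
    ClusterHeader *cluster = new (raw) ClusterHeader();
    cluster->Initialize(totalSize);
    // The computed payload never overruns the original allocation.
    assert(reinterpret_cast<uint8_t *>(cluster->payloadEnd) <= static_cast<uint8_t *>(raw) + totalSize);
    std::free(raw);
    return 0;
}
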
@@ -26,7 +26,7 @@ void CLR_RT_HeapCluster::HeapCluster_Initialize(CLR_UINT32 size, CLR_UINT32 bloc
     {
         if (ptr->DataType() == DATATYPE_WEAKCLASS)
         {
-            CLR_RT_HeapBlock_WeakReference *weak = (CLR_RT_HeapBlock_WeakReference *)ptr;
+            CLR_RT_HeapBlock_WeakReference *weak = (CLR_RT_HeapBlock_WeakReference *)(ptr);
 
             if (weak->DataSize() == CONVERTFROMSIZETOHEAPBLOCKS(sizeof(*weak)) && weak->m_targetSerialized != NULL &&
                 (weak->m_identity.m_flags & CLR_RT_HeapBlock_WeakReference::WR_SurviveBoot))
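The loop around this hunk walks the raw block array and only reinterprets a block as a weak-reference record once its type tag says DATATYPE_WEAKCLASS, then checks its size, its serialized target and the survive-boot flag. A minimal sketch of that tag-then-reinterpret pattern, with invented NodeHeader/WeakRecord types standing in for the real CLR_RT_HeapBlock_WeakReference layout:

// Sketch only: invented types; the real checks live in CLR_RT_HeapBlock_WeakReference.
#include <cstdint>

enum class Tag : uint8_t
{
    Free,
    WeakClass,
    Other
};

struct NodeHeader
{
    Tag tag;
    uint8_t sizeInBlocks;
};

struct WeakRecord
{
    NodeHeader header;
    uint32_t flags;
    void *target;
};

// Returns true when the block can be treated as a salvageable weak record.
bool IsSalvageableWeakRecord(const NodeHeader *node, uint8_t expectedBlocks, uint32_t surviveBootFlag)
{
    if (node->tag != Tag::WeakClass)
    {
        return false;
    }

    // Only after the tag check is it reasonable to view the block as the wider record.
    const WeakRecord *weak = reinterpret_cast<const WeakRecord *>(node);

    return weak->header.sizeInBlocks == expectedBlocks && weak->target != nullptr &&
           (weak->flags & surviveBootFlag) != 0;
}
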
@@ -62,7 +62,7 @@ void CLR_RT_HeapCluster::HeapCluster_Initialize(CLR_UINT32 size, CLR_UINT32 bloc
             }
         }
 
-        if ((unsigned int)(ptr + blockSize) > (unsigned int)end)
+        if ((uintptr_t)(ptr + blockSize) > (uintptr_t)end)
         {
             blockSize = (CLR_UINT32)(end - ptr);
         }
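The unsigned int to uintptr_t change matters wherever pointers are wider than unsigned int (64-bit hosts, for instance): casting a pointer to unsigned int truncates the address, so the overrun check can silently pass. uintptr_t is defined to be wide enough to hold an object pointer. A small sketch of the same clamping logic, using invented names rather than the nanoCLR types:

// Sketch only: clamping a tentative block count so it never runs past `end`.
#include <cstdint>

struct Block
{
    uint32_t raw[3];
};

uint32_t ClampBlockCount(Block *ptr, Block *end, uint32_t blockCount)
{
    static_assert(sizeof(uintptr_t) >= sizeof(void *), "uintptr_t must be able to hold a pointer");

    // Compare addresses as uintptr_t; an unsigned int cast could truncate them on LP64 targets.
    if (reinterpret_cast<uintptr_t>(ptr + blockCount) > reinterpret_cast<uintptr_t>(end))
    {
        blockCount = static_cast<uint32_t>(end - ptr); // whole blocks left before `end`
    }

    return blockCount;
}
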
@@ -208,7 +208,7 @@ void CLR_RT_HeapCluster::RecoverFromGC()
     NATIVE_PROFILE_CLR_CORE();
 
     CLR_RT_HeapBlock_Node *ptr = m_payloadStart;
-    CLR_RT_HeapBlock_Node *end = m_payloadEnd;
+    CLR_RT_HeapBlock_Node const *end = m_payloadEnd;
 
     //
     // Open the free list.
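Declaring end as CLR_RT_HeapBlock_Node const * only promises that the sentinel is never written through that pointer; the pointer variable itself can still be reassigned (forbidding that would take a const pointer, * const). A short, generic illustration of the two flavours, unrelated to the nanoCLR code:

// Sketch only: pointer-to-const vs. const pointer, plain C++.
void ConstPointerFlavours()
{
    int blocks[4] = {0, 1, 2, 3};

    int const *end = &blocks[4];  // pointer to const: *end cannot be written, end itself can move
    int *const last = &blocks[3]; // const pointer: *last can be written, last cannot be reseated

    end = &blocks[2]; // OK: the pointer variable stays mutable
    *last = 7;        // OK: the pointed-to int stays mutable
    // *end = 7;      // error: the value is const through this pointer
    // last = blocks; // error: the pointer itself is const

    (void)end; // silence unused-value warnings in this illustration
}
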
@@ -285,12 +285,14 @@ void CLR_RT_HeapCluster::RecoverFromGC()
 CLR_RT_HeapBlock_Node *CLR_RT_HeapCluster::InsertInOrder(CLR_RT_HeapBlock_Node *node, CLR_UINT32 size)
 {
     NATIVE_PROFILE_CLR_CORE();
-    CLR_RT_HeapBlock_Node *ptr;
+    CLR_RT_HeapBlock_Node *ptr = nullptr;
 
     NANOCLR_FOREACH_NODE__NODECL(CLR_RT_HeapBlock_Node, ptr, m_freeList)
     {
         if (ptr > node)
+        {
             break;
+        }
     }
     NANOCLR_FOREACH_NODE_END();
 
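InsertInOrder keeps the free list sorted by address: the loop above stops at the first free node located above the incoming one (what follows the loop is outside this hunk), the added braces around break are purely stylistic, and ptr = nullptr simply gives the cursor a defined value before the macro assigns it. A minimal address-ordered insert over a std::list of pointers, assuming a hypothetical FreeBlock type, shows the same idea without the NANOCLR_FOREACH machinery:

// Sketch only: address-ordered insertion, not the real doubly linked free list.
#include <list>

struct FreeBlock
{
    unsigned size;
};

// Insert `node` so the list stays sorted by ascending address.
void InsertInAddressOrder(std::list<FreeBlock *> &freeList, FreeBlock *node)
{
    auto it = freeList.begin();

    // Pointer comparison mirrors the original `ptr > node` test.
    while (it != freeList.end() && *it <= node)
    {
        ++it; // stop at the first block that lives above `node`
    }

    freeList.insert(it, node); // std::list::insert places `node` before `it`
}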